diff --git a/.gitignore b/.gitignore index 434f20f..6fcce67 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ build +html *.so *.o *.pyc @@ -7,6 +8,8 @@ build *.a *.log *.status +*.dSYM +*.nc bathy_smoother/Python/ examples/Yellow_Sea/Inputs/Initial/compute_remap_weights_in examples/Yellow_Sea/YELLOW_grd_v1.nc @@ -36,3 +39,4 @@ pyroms/external/scrip/source/makefile2 pyroms/external/scrip/source/scrip pyroms/external/scrip/source/scrip_test pyroms_toolbox/Python/ +mask_change.txt diff --git a/README b/README index a65d23d..228e86a 100644 --- a/README +++ b/README @@ -10,16 +10,11 @@ rough around the edges. Prerequisites ------------- -Meanwhile, you can prepare by installing Enthought Python -(http://www.enthought.com/) or by this collection of packages: +Now using Anaconda Python to install Python 3.x and the rest of these packages: - * Python itself. You will need a version that's at least 2.4 and - less than 3.0. + * Python itself. You will need a version that's at least 3.4. - * numpy. Get a version that's reasonably new, don't count on the - version 1.0.1 that you found already on the system. How do you - tell the version? At the interactive prompt from python, type - "import numpy" then "numpy.__version__". + * numpy. * scipy. @@ -39,10 +34,15 @@ Meanwhile, you can prepare by installing Enthought Python interactive fun. It preloads both numpy and the matplotlib pylab package. -We may also add pyngl and pynio from NCAR to this list. Or ESMPy. -Or Python 3... + * natgrid from http://github.com/matplotlib/natgrid for at least + one of the examples. -If you want a package manager for these bits, check out Anaconda. +Planning to move to ESMF. Install it with: + + conda install --channel https://conda.anaconda.org/conda-forge esmf + +We may also add pyngl and pynio from NCAR to this list. +Or jupyter. Installing ---------- @@ -53,31 +53,40 @@ setup.py scripts for each, plus an examples directory. For now, - cd pyroms_toolbox; python setup.py build --fcompiler=gnu95; + cd pyroms_toolbox; python setup.py build python setup.py install --prefix=xxx; cd .. -The above may or may not need something along these lines: -setenv LDFLAGS '-L/usr/local/pkg/python/python-2.7.2/lib -shared' - cd bathy_smoother; python setup.py build; python setup.py install --prefix=xxx; cd .. cd pyroms; check the paths in the install script and see if it runs... + Maybe cut and paste from it into a bash shell. + +I had to hack the scrip makefile for the Fortran 90 netCDF stuff. The +Anaconda netcdf-fortran reports: + nf-config not yet implemented for cmake builds + +A note on the .so files built from Fortran: they might now end up with names like: -If you needed the LDFLAGS above, you'll need to add it to the -install script too, at least for the build of scrip. + scrip.cpython-35m-x86_64-linux-gnu.so -I'm getting inconsistent results with where the .so files need to be. Best -results for me are if I go to the site-packages directory where pycnal -got installed and copy all the .so files: +It's OK - they'll load as long as they are in your PYTHONPATH. You might +also need the directory containing libgu.so to be in your LD_LIBRARY_PATH. I'm getting inconsistent +results with where the .so files need to be. Best results for me are if I go +to the site-packages directory where pyroms got installed and copy all the +.so files: - cp pycnal/*.so . - cp pycnal_toolbox/*.so . + cp pyroms/*.so . + cp pyroms_toolbox/*.so . cp bathy_smoother/*.so . -Make sure this site-packages directory is in your PYTHONPATH.
+Make sure this site-packages directory is in your PYTHONPATH. As for +libgu.so, it went into $DESTDIR/lib, for me $HOME/python/lib - that's +what needs to be in the LD_LIBRARY_PATH. +Also, f2py may or may not be called f2py3, depending on how your Python was installed. It's listed explicitly +in the scrip makefile and under pyroms_toolbox/pyroms_toolbox/src/makefile. Running ------- @@ -86,3 +95,11 @@ We have a gridid.txt file that's pointed to by the PYROMS_GRIDID_FILE environment variable. If you are operating on files containing sufficient grid information already, you won't need to use this. An example is provided in the examples directory. + + +Doxygen +------- + +Running "doxygen .doxygen" in any of pyroms, pyroms_toolbox or +bathy_smoother will generate the Doxygen documentation. Edit the .doxygen files to +specify HTML vs. some other output format. diff --git a/bathy_smoother/.doxygen b/bathy_smoother/.doxygen new file mode 100644 index 0000000..3d79969 --- /dev/null +++ b/bathy_smoother/.doxygen @@ -0,0 +1,1519 @@ +# Doxyfile 1.6.1 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = bathy_smoother + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = YES + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. 
This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. 
+# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. 
For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = YES + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. 
+ +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the (brief and detailed) documentation of class members so that constructors and destructors are listed first. If set to NO (the default) the constructors will appear in the respective orders defined by SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. 
+ +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. 
+ +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = doxygen.log + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = bathy_smoother + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. 
+ +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. 
+ +INLINE_SOURCES = YES + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. 
+ +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = + +# If the HTML_TIMESTAMP tag is set to YES then the generated HTML +# documentation will contain the timesstamp. + +HTML_TIMESTAMP = NO + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = NO + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. 
+ +HHC_LOCATION = + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to YES, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). +# Windows users are probably better off using the HTML help feature. 
+ +GENERATE_TREEVIEW = YES + +# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list. + +USE_INLINE_TREES = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +# When the SEARCHENGINE tag is enable doxygen will generate a search box for the HTML output. The underlying search engine uses javascript +# and DHTML and should work on any modern browser. Note that when using HTML help (GENERATE_HTMLHELP) or Qt help (GENERATE_QHP) +# there is already a search function so this one should typically +# be disabled. + +SEARCHENGINE = YES + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = letter + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. 
+ +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. 
+ +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. 
+ +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = YES + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = YES + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. 
+ +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = NO + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = NO + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. 
Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = NO + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES diff --git a/bathy_smoother/bathy_smoother/LP_bathy_smoothing.py b/bathy_smoother/bathy_smoother/LP_bathy_smoothing.py index e974233..d18c745 100644 --- a/bathy_smoother/bathy_smoother/LP_bathy_smoothing.py +++ b/bathy_smoother/bathy_smoother/LP_bathy_smoothing.py @@ -62,7 +62,7 @@ def LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst): ValueFct, ValueVar, testfeasibility = LP_tools.SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct) if (testfeasibility == 0): NewBathy = NaN * np.ones((eta_rho,xi_rho)) - raise ValueError, 'Feasibility test failed. testfeasibility = 0.' + raise ValueError('Feasibility test failed. 
testfeasibility = 0.') correctionBathy = np.zeros((eta_rho,xi_rho)) nbVert = 0 @@ -75,7 +75,7 @@ def LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst): NewBathy = Hobs + correctionBathy RMat = bathy_tools.RoughnessMatrix(NewBathy, MSK) MaxRx0 = RMat.max() - print 'rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0 + print('rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0) return NewBathy @@ -124,7 +124,7 @@ def LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst): Kbad = np.where(MSKbad == 1) nbKbad = np.size(Kbad,1) ListIdx = np.zeros((eta_rho,xi_rho), dtype=np.int) - ListIdx[Kbad] = range(nbKbad) + ListIdx[Kbad] = list(range(nbKbad)) ListEdges = [] nbEdge = 0 @@ -146,11 +146,11 @@ def LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst): NewBathy = Hobs.copy() for iColor in range(1,nbColor+1): - print '---------------------------------------------------------------' + print('---------------------------------------------------------------') MSKcolor = np.zeros((eta_rho, xi_rho)) K = np.where(ListVertexStatus == iColor) nbK = np.size(K,1) - print 'iColor = ', iColor, ' nbK = ', nbK + print('iColor = ', iColor, ' nbK = ', nbK) for iVertex in range(nbKbad): if (ListVertexStatus[iVertex,0] == iColor): iEta, iXi = Kbad[0][iVertex], Kbad[1][iVertex] @@ -166,10 +166,10 @@ def LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst): TheNewBathy = LP_smoothing_rx0(MSKcolor, MSKHobs, rx0max, SignConst, AmpConst) NewBathy[K] = TheNewBathy[K].copy() - print 'Final obtained bathymetry' + print('Final obtained bathymetry') RMat = bathy_tools.RoughnessMatrix(NewBathy, MSK) MaxRx0 = RMat.max() - print 'rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0 + print('rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0) return NewBathy diff --git a/bathy_smoother/bathy_smoother/LP_bathy_tools.py b/bathy_smoother/bathy_smoother/LP_bathy_tools.py index 15b69a2..1fbdbff 100644 --- a/bathy_smoother/bathy_smoother/LP_bathy_tools.py +++ b/bathy_smoother/bathy_smoother/LP_bathy_tools.py @@ -12,7 +12,7 @@ def GetIJS_rx0(MSK, DEP, r): eta_rho, xi_rho = DEP.shape - print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho + print('eta_rho = ', eta_rho, ' xi_rho = ', xi_rho) nbVert = 0 ListCoord = np.zeros((eta_rho, xi_rho)) @@ -23,8 +23,8 @@ def GetIJS_rx0(MSK, DEP, r): ListCoord[iEta,iXi] = nbVert TotalNbVert = nbVert - print 'ListCoord built' - print 'Computing inequalities for r = ', r + print('ListCoord built') + print('Computing inequalities for r = ', r) TotalNbConstant = 0 TotalNbEntry = 0 @@ -80,7 +80,7 @@ def GetIJS_rx0(MSK, DEP, r): sList[nbEntry,0] = 1-r nbEntry = nbEntry + 1 - print 'Inequalities for dh(iEta,iXi) and dh(iEta+1,iXi)' + print('Inequalities for dh(iEta,iXi) and dh(iEta+1,iXi)') for iEta in range(eta_rho): for iXi in range(xi_rho-1): @@ -112,7 +112,7 @@ def GetIJS_rx0(MSK, DEP, r): sList[nbEntry,0] = 1-r nbEntry = nbEntry + 1 - print 'Inequalities for dh(iEta,iXi) and dh(iEta,iXi+1)' + print('Inequalities for dh(iEta,iXi) and dh(iEta,iXi+1)') for iEta in range(eta_rho): for iXi in range(xi_rho): @@ -141,16 +141,16 @@ def GetIJS_rx0(MSK, DEP, r): sList[nbEntry,0] = -1 nbEntry = nbEntry + 1 - print 'Inequalities dh <= ad and -dh <= ad' + print('Inequalities dh <= ad and -dh <= ad') - print 'rx0: nbEntry = ', nbEntry, ' nbConst = ', nbConst - print ' ' + print('rx0: nbEntry = ', nbEntry, ' nbConst = ', nbConst) + print(' ') if (abs(nbEntry - TotalNbEntry) > 0): - raise ValueError, 'We have a coding inconsistency for nbEntry. 
Please correct' + raise ValueError('We have a coding inconsistency for nbEntry. Please correct') if (abs(nbConst - TotalNbConstant) > 0): - raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' + raise ValueError('We have a coding inconsistency for nbConst. Please correct') return iList, jList, sList, Constant @@ -161,7 +161,7 @@ def GetIJS_maxamp(MSK, DEP, AmpConst): eta_rho, xi_rho = DEP.shape - print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho + print('eta_rho = ', eta_rho, ' xi_rho = ', xi_rho) nbVert = 0 ListCoord = np.zeros((eta_rho, xi_rho)) @@ -178,8 +178,8 @@ def GetIJS_maxamp(MSK, DEP, AmpConst): if (MSK[iEta,iXi] == 1): alpha = AmpConst[iEta,iXi] if (alpha < 9999): - TotalNbConstant = TotalNbConstant + 2 - TotalNbEntry = TotalNbEntry + 2 + TotalNbConstant = TotalNbConstant + 2 + TotalNbEntry = TotalNbEntry + 2 nbConst = 0 nbEntry = 0 @@ -195,29 +195,29 @@ def GetIJS_maxamp(MSK, DEP, AmpConst): alpha = AmpConst[iEta,iXi] if (alpha < 9999): - Constant[nbConst,0] = alpha * DEP[iEta,iXi] - iList[nbEntry,0] = nbConst + 1 - jList[nbEntry,0] = idx - sList[nbEntry,0] = -1 - nbConst = nbConst + 1 + Constant[nbConst,0] = alpha * DEP[iEta,iXi] + iList[nbEntry,0] = nbConst + 1 + jList[nbEntry,0] = idx + sList[nbEntry,0] = -1 + nbConst = nbConst + 1 nbEntry = nbEntry + 1 - Constant[nbConst,0] = alpha * DEP[iEta,iXi] - iList[nbEntry,0] = nbConst + 1 - jList[nbEntry,0] = idx - sList[nbEntry,0] = 1 - nbConst = nbConst + 1 - nbEntry = nbEntry + 1 + Constant[nbConst,0] = alpha * DEP[iEta,iXi] + iList[nbEntry,0] = nbConst + 1 + jList[nbEntry,0] = idx + sList[nbEntry,0] = 1 + nbConst = nbConst + 1 + nbEntry = nbEntry + 1 - print 'Inequalities |h^{new} - h^{old}| <= alpha h^{old}' - print 'maxamp: nbEntry = ', nbEntry, ' nbConst = ', nbConst - print ' ' + print('Inequalities |h^{new} - h^{old}| <= alpha h^{old}') + print('maxamp: nbEntry = ', nbEntry, ' nbConst = ', nbConst) + print(' ') if (abs(nbEntry - TotalNbEntry) > 0): - raise ValueError, 'We have a coding inconsistency for nbEntry. Please correct' + raise ValueError('We have a coding inconsistency for nbEntry. Please correct') if (abs(nbConst - TotalNbConstant) > 0): - raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' + raise ValueError('We have a coding inconsistency for nbConst. 
Please correct') return iList, jList, sList, Constant @@ -227,7 +227,7 @@ def GetIJS_maxamp(MSK, DEP, AmpConst): def GetIJS_signs(MSK, SignConst): eta_rho, xi_rho = MSK.shape - print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho + print('eta_rho = ', eta_rho, ' xi_rho = ', xi_rho) nbVert = 0 ListCoord = np.zeros((eta_rho, xi_rho)) @@ -242,8 +242,8 @@ def GetIJS_signs(MSK, SignConst): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta,iXi] == 1 and SignConst[iEta,iXi] != 0): - TotalNbConstant = TotalNbConstant + 1 - TotalNbEntry = TotalNbEntry + 1 + TotalNbConstant = TotalNbConstant + 1 + TotalNbEntry = TotalNbEntry + 1 nbConst = 0 nbEntry = 0 @@ -267,18 +267,18 @@ def GetIJS_signs(MSK, SignConst): elif (SignConst[iEta, iXi] == -1): sList[nbEntry,0] = 1 else: - raise ValueError, 'Wrong assigning please check SignConst' + raise ValueError('Wrong assigning please check SignConst') nbEntry = nbEntry + 1 - print 'Inequalities dh >= 0 or dh <= 0' - print 'signs: nbEntry = ', nbEntry, ' nbConst = ', nbConst - print ' ' + print('Inequalities dh >= 0 or dh <= 0') + print('signs: nbEntry = ', nbEntry, ' nbConst = ', nbConst) + print(' ') if (abs(nbEntry - TotalNbEntry) > 0): - raise ValueError, 'We have a coding inconsistency for nbEntry. Please correct' + raise ValueError('We have a coding inconsistency for nbEntry. Please correct') if (abs(nbConst - TotalNbConstant) > 0): - raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' + raise ValueError('We have a coding inconsistency for nbConst. Please correct') return iList, jList, sList, Constant @@ -348,9 +348,9 @@ def Neighborhood(MSK, iEta, iXi, Kdist): nbPt = 1 List4dir = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) for iK in range(1,Kdist+1): nbPtOld = nbPt @@ -359,7 +359,7 @@ def Neighborhood(MSK, iEta, iXi, Kdist): iEta = ListNeigh[iPt,0] iXi = ListNeigh[iPt,1] for ineigh in range(4): - iEtaN = iEta + List4dir[ineigh,0] + iEtaN = iEta + List4dir[ineigh,0] iXiN = iXi + List4dir[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): @@ -399,7 +399,7 @@ def ConnectedComponent(ListEdges, nbVert): ListAdjacency[eVert,eDeg-1] = fVert ListAdjacency[fVert,fDeg-1] = eVert - + MaxDeg = ListDegree.max() ListAdjacency = ListAdjacency[:,:MaxDeg] @@ -424,7 +424,7 @@ def ConnectedComponent(ListEdges, nbVert): for iH in range(np.size(H, 1)): eVert = H[0][iH] for iV in range(ListDegree[eVert, 0]): - ListNewHot[ListAdjacency[eVert, iV],0] = 1 + ListNewHot[ListAdjacency[eVert, iV],0] = 1 ListHot = ListNotDone * ListNewHot SumH = sum(ListHot) diff --git a/bathy_smoother/bathy_smoother/LP_tools.py b/bathy_smoother/bathy_smoother/LP_tools.py index 4cd41e8..2217bf4 100644 --- a/bathy_smoother/bathy_smoother/LP_tools.py +++ b/bathy_smoother/bathy_smoother/LP_tools.py @@ -5,8 +5,8 @@ try: from lpsolve55 import * except: - print 'lpsolve55.so not found.' - print 'Linear programming method will not be available.' 
+ print('lpsolve55.so not found.') + print('Linear programming method will not be available.') # This code is adapted from the matlab code @@ -24,9 +24,9 @@ def WriteLinearProgram(FileName, iList, jList, sList, Constant, ObjectiveFct): nbVar = ObjectiveFct.shape[0] nbConst = Constant.shape[0] - print 'Write linear program' - print 'nbvar = ', nbVar, ' nbConst = ', nbConst - print ' ' + print('Write linear program') + print('nbvar = ', nbVar, ' nbConst = ', nbConst) + print(' ') f = open(FileName,'w') @@ -62,7 +62,7 @@ def WriteLinearProgram(FileName, iList, jList, sList, Constant, ObjectiveFct): if (Constant[iConst,0] < -tolCrit): testfeasibility = 0 return testfeasibility - + else: string = 'row%s: ' %str(iConst+1) f.write(string) @@ -71,9 +71,9 @@ def WriteLinearProgram(FileName, iList, jList, sList, Constant, ObjectiveFct): sL = sList[H[iH],0] string = '%.2f X%d ' %(sL, jL) if (sL > 0): - add='+' + add='+' else: - add='' + add='' string = '%s%s' %(add, string) f.write(string) @@ -105,7 +105,7 @@ def SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct): nbVar = ObjectiveFct.shape[0] nbConstraint = Constant.shape[0] - print 'Solving a linear program of ', nbVar, ' variables and ', nbConstraint, ' Constraints' + print('Solving a linear program of ', nbVar, ' variables and ', nbConstraint, ' Constraints') while(1): H = localtime() @@ -120,20 +120,20 @@ def SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct): if (os.path.exists(FileInput) is False and os.path.exists(FileOutput) is False): break - print 'We failed with FileInput = ', FileInput + print('We failed with FileInput = ', FileInput) testfeasibility = WriteLinearProgram(FileInput, iList, jList, sList, Constant, ObjectiveFct) if (testfeasibility == 0): - raise ValueError, 'Feasibility test failed. testfeasibility = 0.' + raise ValueError('Feasibility test failed. testfeasibility = 0.') - print 'Linear program written in FileOutput=', FileOutput + print('Linear program written in FileOutput=', FileOutput) lp_handle = lpsolve('read_lp_file', FileInput) result = lpsolve('solve', lp_handle) obj, ValueVar, ValueFct, testfeasibility = lpsolve('get_solution', result) lpsolve('delete_lp', result) - print 'Linear program solved' + print('Linear program solved') return ValueFct, ValueVar, testfeasibility diff --git a/bathy_smoother/bathy_smoother/__init__.py b/bathy_smoother/bathy_smoother/__init__.py index c86fae9..fe325df 100644 --- a/bathy_smoother/bathy_smoother/__init__.py +++ b/bathy_smoother/bathy_smoother/__init__.py @@ -4,11 +4,11 @@ (ripped from matlab script LP_bathymetry) ''' -import bathy_smoothing -import bathy_tools -import LP_bathy_smoothing -import LP_bathy_tools -import LP_tools +from . import bathy_smoothing +from . import bathy_tools +from . import LP_bathy_smoothing +from . import LP_bathy_tools +from . 
import LP_tools __authors__ = ['Frederic Castruccio '] diff --git a/bathy_smoother/bathy_smoother/bathy_smoothing.py b/bathy_smoother/bathy_smoother/bathy_smoothing.py index a30c84d..0e9c3f6 100644 --- a/bathy_smoother/bathy_smoother/bathy_smoothing.py +++ b/bathy_smoother/bathy_smoother/bathy_smoothing.py @@ -27,9 +27,9 @@ def smoothing_Positive_rx0(MSK, Hobs, rx0max): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) RetBathy = Hobs.copy() @@ -41,21 +41,21 @@ def smoothing_Positive_rx0(MSK, Hobs, rx0max): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta,iXi] == 1): - for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + for ineigh in range(4): + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - LowerBound = RetBathy[iEtaN,iXiN] * (1-rx0max)/(1+rx0max) - if ((RetBathy[iEta,iXi] - LowerBound) < -tol): - IsFinished = 0 - RetBathy[iEta,iXi] = LowerBound - nbModif = nbModif + 1 + LowerBound = RetBathy[iEtaN,iXiN] * (1-rx0max)/(1+rx0max) + if ((RetBathy[iEta,iXi] - LowerBound) < -tol): + IsFinished = 0 + RetBathy[iEta,iXi] = LowerBound + nbModif = nbModif + 1 if (IsFinished == 1): break - print ' nbModif=', nbModif + print(' nbModif=', nbModif) return RetBathy @@ -63,8 +63,8 @@ def smoothing_Positive_rx0(MSK, Hobs, rx0max): def smoothing_Negative_rx0(MSK, Hobs, rx0max): """ - This program use an opposite methode to the direct iterative method from - Martinho and Batteen (2006). This program optimizes the bathymetry for + This program use an opposite methode to the direct iterative method from + Martinho and Batteen (2006). This program optimizes the bathymetry for a given rx0 factor by decreasing it. Usage: @@ -80,9 +80,9 @@ def smoothing_Negative_rx0(MSK, Hobs, rx0max): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) RetBathy = Hobs.copy() @@ -94,21 +94,21 @@ def smoothing_Negative_rx0(MSK, Hobs, rx0max): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta, iXi] == 1): - for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + for ineigh in range(4): + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) - if (RetBathy[iEta,iXi] > (UpperBound + tol)): - IsFinished = 0 - RetBathy[iEta, iXi] = UpperBound - nbModif = nbModif + 1 + UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) + if (RetBathy[iEta,iXi] > (UpperBound + tol)): + IsFinished = 0 + RetBathy[iEta, iXi] = UpperBound + nbModif = nbModif + 1 if (IsFinished == 1): break - print ' nbModif=', nbModif + print(' nbModif=', nbModif) return RetBathy @@ -117,8 +117,8 @@ def smoothing_Negative_rx0(MSK, Hobs, rx0max): def smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix): """ This program use the direct iterative method from Martinho and Batteen (2006) - The bathymetry is optimized for a given rx0 factor by increasing it. All depth - are then multiplied by the coeficient K = Vol_init/Vol_final in order to + The bathymetry is optimized for a given rx0 factor by increasing it. 
All depth + are then multiplied by the coeficient K = Vol_init/Vol_final in order to insure volume conservation. Usage: @@ -136,9 +136,9 @@ def smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) WorkBathy = Hobs.copy() @@ -150,21 +150,21 @@ def smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta, iXi] == 1): - for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + for ineigh in range(4): + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - LowerBound = RetBathy[iEtaN, iXiN] * (1-rx0max)/(1+rx0max) - if ((WorkBathy[iEta,iXi] - LowerBound) < -tol): - IsFinished = 0 - WorkBathy[iEta, iXi] = LowerBound - nbModif = nbModif + 1 + LowerBound = RetBathy[iEtaN, iXiN] * (1-rx0max)/(1+rx0max) + if ((WorkBathy[iEta,iXi] - LowerBound) < -tol): + IsFinished = 0 + WorkBathy[iEta, iXi] = LowerBound + nbModif = nbModif + 1 if (IsFinished == 1): break - print ' nbModif=', nbModif + print(' nbModif=', nbModif) VolOrig=0 VolWork=0 @@ -182,9 +182,9 @@ def smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix): def smoothing_NegativeVolume_rx0(MSK, Hobs, rx0maxi, AreaMatrix): """ - This program use an opposite methode to the direct iterative method from - Martinho and Batteen (2006). This program optimizes the bathymetry for - a given rx0 factor by decreasing it. All depth are then multiplied by + This program use an opposite methode to the direct iterative method from + Martinho and Batteen (2006). This program optimizes the bathymetry for + a given rx0 factor by decreasing it. All depth are then multiplied by the coeficient K = Vol_init/Vol_final in order to insure volume conservation. 
Usage: @@ -202,9 +202,9 @@ def smoothing_NegativeVolume_rx0(MSK, Hobs, rx0maxi, AreaMatrix): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) WorkBathy = Hobs.copy() @@ -216,21 +216,21 @@ def smoothing_NegativeVolume_rx0(MSK, Hobs, rx0maxi, AreaMatrix): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta, iXi] == 1): - for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + for ineigh in range(4): + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) - if (WorkBathy[iEta,iXi] > (UpperBound + tol)): - IsFinished = 0 - WorkBathy[iEta, iXi] = UpperBound - nbModif = nbModif + 1 + UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) + if (WorkBathy[iEta,iXi] > (UpperBound + tol)): + IsFinished = 0 + WorkBathy[iEta, iXi] = UpperBound + nbModif = nbModif + 1 if (IsFinished == 1): break - print ' nbModif=', nbModif + print(' nbModif=', nbModif) VolOrig=0 VolWork=0 @@ -267,9 +267,9 @@ def smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) RetBathy = Hobs.copy() @@ -283,15 +283,15 @@ def smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix): for iEta in range(eta_rho): for iXi in range(xi_rho): if (MSK[iEta, iXi] == 1): - Area = AreaMatrix[iEta, iXi] - for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + Area = AreaMatrix[iEta, iXi] + for ineigh in range(4): + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - AreaN = AreaMatrix[iEtaN,iXiN] - LowerBound = RetBathy[iEtaN,iXiN] * TheMultiplier - if ((RetBathy[iEta,iXi] - LowerBound) < -tol): + AreaN = AreaMatrix[iEtaN,iXiN] + LowerBound = RetBathy[iEtaN,iXiN] * TheMultiplier + if ((RetBathy[iEta,iXi] - LowerBound) < -tol): IsFinished = 0 h = (TheMultiplier * RetBathy[iEtaN,iXiN] - RetBathy[iEta,iXi]) \ / (AreaN + TheMultiplier * Area) @@ -308,7 +308,7 @@ def smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix): H = AreaMatrix * RetBathy * MSK TheBathymetry2 = H.sum() DeltaBathymetry = TheBathymetry1 - TheBathymetry2 - print 'DeltaBathymetry = ', DeltaBathymetry + print('DeltaBathymetry = ', DeltaBathymetry) return RetBathy, HmodifVal, ValueFct @@ -332,9 +332,9 @@ def smoothing_Laplacian_rx0(MSK, Hobs, rx0max): eta_rho, xi_rho = Hobs.shape ListNeigh = np.array([[1, 0], - [0, 1], - [-1, 0], - [0, -1]]) + [0, 1], + [-1, 0], + [0, -1]]) RetBathy = Hobs.copy() @@ -344,8 +344,8 @@ def smoothing_Laplacian_rx0(MSK, Hobs, rx0max): for iXi in range(xi_rho): WeightSum = 0 for ineigh in range(4): - iEtaN = iEta + ListNeigh[ineigh,0] - iXiN = iXi + ListNeigh[ineigh,1] + iEtaN = iEta + ListNeigh[ineigh,0] + iXiN = iXi + ListNeigh[ineigh,1] if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): WeightSum = WeightSum + 1 @@ -364,7 +364,7 @@ def smoothing_Laplacian_rx0(MSK, Hobs, rx0max): nbPointMod = 0 AdditionalDone = np.zeros((eta_rho, xi_rho)) for iEta in range(eta_rho): - for iXi in range(xi_rho): + for iXi in range(xi_rho): Weight = 0 WeightSum = 0 for ineigh in range(4): @@ -378,17 +378,17 @@ def 
smoothing_Laplacian_rx0(MSK, Hobs, rx0max): TheWeight = WeightMatrix[iEta,iXi] WeDo = 0 if TheWeight > tol: - if RoughMat[iEta,iXi] > rx0max: - WeDo = 1 + if RoughMat[iEta,iXi] > rx0max: + WeDo = 1 if NumberDones[iEta,iXi] > 0: - WeDo = 1 + WeDo = 1 if WeDo == 1: - IsFinished = 0 - TheDelta = (Weight - TheWeight * RetBathy[iEta,iXi]) / (2 * TheWeight) - TheCorrect[iEta,iXi] = TheCorrect[iEta,iXi] + TheDelta - nbPointMod = nbPointMod + 1 - NumberDones[iEta,iXi] = 1 + IsFinished = 0 + TheDelta = (Weight - TheWeight * RetBathy[iEta,iXi]) / (2 * TheWeight) + TheCorrect[iEta,iXi] = TheCorrect[iEta,iXi] + TheDelta + nbPointMod = nbPointMod + 1 + NumberDones[iEta,iXi] = 1 NumberDones = NumberDones + AdditionalDone RetBathy = RetBathy + TheCorrect @@ -403,13 +403,13 @@ def smoothing_Laplacian_rx0(MSK, Hobs, rx0max): eStr=''; NumberDones = np.zeros((eta_rho, xi_rho)) - print 'Iteration #', Iter - print 'current r=', realR, ' nbPointMod=', nbPointMod, eStr - print ' ' + print('Iteration #', Iter) + print('current r=', realR, ' nbPointMod=', nbPointMod, eStr) + print(' ') Iter = Iter + 1 if (IsFinished == 1): break - + return RetBathy diff --git a/bathy_smoother/bathy_smoother/bathy_tools.py b/bathy_smoother/bathy_smoother/bathy_tools.py index 58126c8..cc298ba 100644 --- a/bathy_smoother/bathy_smoother/bathy_tools.py +++ b/bathy_smoother/bathy_smoother/bathy_tools.py @@ -19,9 +19,9 @@ def RoughnessMatrix(DEP, MSK): eta_rho, xi_rho = DEP.shape Umat = np.array([[0, 1], - [1, 0], - [0, -1], - [-1, 0]]) + [1, 0], + [0, -1], + [-1, 0]]) RoughMat = np.zeros(DEP.shape) diff --git a/bathy_smoother/docs/api-objects.txt b/bathy_smoother/docs/api-objects.txt deleted file mode 100644 index 34b58b9..0000000 --- a/bathy_smoother/docs/api-objects.txt +++ /dev/null @@ -1,151 +0,0 @@ -bathy_smoother bathy_smoother-module.html -bathy_smoother.LP_bathy_smoothing bathy_smoother.LP_bathy_smoothing-module.html -bathy_smoother.LP_bathy_smoothing.LP_smoothing_rx0 bathy_smoother.LP_bathy_smoothing-module.html#LP_smoothing_rx0 -bathy_smoother.LP_bathy_smoothing.LP_smoothing_rx0_heuristic bathy_smoother.LP_bathy_smoothing-module.html#LP_smoothing_rx0_heuristic -bathy_smoother.LP_bathy_tools bathy_smoother.LP_bathy_tools-module.html -bathy_smoother.LP_bathy_tools.MergeIJS_listings bathy_smoother.LP_bathy_tools-module.html#MergeIJS_listings -bathy_smoother.LP_bathy_tools.ConnectedComponent bathy_smoother.LP_bathy_tools-module.html#ConnectedComponent -bathy_smoother.LP_bathy_tools.Neighborhood bathy_smoother.LP_bathy_tools-module.html#Neighborhood -bathy_smoother.LP_bathy_tools.GetBadPoints bathy_smoother.LP_bathy_tools-module.html#GetBadPoints -bathy_smoother.LP_bathy_tools.GetIJS_rx0 bathy_smoother.LP_bathy_tools-module.html#GetIJS_rx0 -bathy_smoother.LP_bathy_tools.GetIJS_maxamp bathy_smoother.LP_bathy_tools-module.html#GetIJS_maxamp -bathy_smoother.LP_bathy_tools.GetIJS_signs bathy_smoother.LP_bathy_tools-module.html#GetIJS_signs -bathy_smoother.LP_tools bathy_smoother.LP_tools-module.html -bathy_smoother.LP_tools.MSG_MILPBETTER bathy_smoother.LP_tools-module.html#MSG_MILPBETTER -bathy_smoother.LP_tools.PRICE_AUTOPARTIAL bathy_smoother.LP_tools-module.html#PRICE_AUTOPARTIAL -bathy_smoother.LP_tools.CRASH_NONE bathy_smoother.LP_tools-module.html#CRASH_NONE -bathy_smoother.LP_tools.NODE_AUTOORDER bathy_smoother.LP_tools-module.html#NODE_AUTOORDER -bathy_smoother.LP_tools.PRESOLVE_COLFIXDUAL bathy_smoother.LP_tools-module.html#PRESOLVE_COLFIXDUAL -bathy_smoother.LP_tools.MSG_PRESOLVE 
bathy_smoother.LP_tools-module.html#MSG_PRESOLVE -bathy_smoother.LP_tools.NODE_FIRSTSELECT bathy_smoother.LP_tools-module.html#NODE_FIRSTSELECT -bathy_smoother.LP_tools.PRESOLVE_MERGEROWS bathy_smoother.LP_tools-module.html#PRESOLVE_MERGEROWS -bathy_smoother.LP_tools.SCALE_QUADRATIC bathy_smoother.LP_tools-module.html#SCALE_QUADRATIC -bathy_smoother.LP_tools.NOFEASFOUND bathy_smoother.LP_tools-module.html#NOFEASFOUND -bathy_smoother.LP_tools.PRESOLVE_ROWDOMINATE bathy_smoother.LP_tools-module.html#PRESOLVE_ROWDOMINATE -bathy_smoother.LP_tools.ANTIDEGEN_RHSPERTURB bathy_smoother.LP_tools-module.html#ANTIDEGEN_RHSPERTURB -bathy_smoother.LP_tools.ANTIDEGEN_DURINGBB bathy_smoother.LP_tools-module.html#ANTIDEGEN_DURINGBB -bathy_smoother.LP_tools.PRESOLVE_DUALS bathy_smoother.LP_tools-module.html#PRESOLVE_DUALS -bathy_smoother.LP_tools.UNBOUNDED bathy_smoother.LP_tools-module.html#UNBOUNDED -bathy_smoother.LP_tools.ANTIDEGEN_NUMFAILURE bathy_smoother.LP_tools-module.html#ANTIDEGEN_NUMFAILURE -bathy_smoother.LP_tools.SUBOPTIMAL bathy_smoother.LP_tools-module.html#SUBOPTIMAL -bathy_smoother.LP_tools.SolveLinearProgram bathy_smoother.LP_tools-module.html#SolveLinearProgram -bathy_smoother.LP_tools.IMPROVE_DUALFEAS bathy_smoother.LP_tools-module.html#IMPROVE_DUALFEAS -bathy_smoother.LP_tools.CRITICAL bathy_smoother.LP_tools-module.html#CRITICAL -bathy_smoother.LP_tools.PRESOLVE_KNAPSACK bathy_smoother.LP_tools-module.html#PRESOLVE_KNAPSACK -bathy_smoother.LP_tools.PRICER_FIRSTINDEX bathy_smoother.LP_tools-module.html#PRICER_FIRSTINDEX -bathy_smoother.LP_tools.ANTIDEGEN_LOSTFEAS bathy_smoother.LP_tools-module.html#ANTIDEGEN_LOSTFEAS -bathy_smoother.LP_tools.PRESOLVE_COLDOMINATE bathy_smoother.LP_tools-module.html#PRESOLVE_COLDOMINATE -bathy_smoother.LP_tools.PRICE_HARRISTWOPASS bathy_smoother.LP_tools-module.html#PRICE_HARRISTWOPASS -bathy_smoother.LP_tools.IMPROVE_THETAGAP bathy_smoother.LP_tools-module.html#IMPROVE_THETAGAP -bathy_smoother.LP_tools.GE bathy_smoother.LP_tools-module.html#GE -bathy_smoother.LP_tools.NODE_RCOSTFIXING bathy_smoother.LP_tools-module.html#NODE_RCOSTFIXING -bathy_smoother.LP_tools.SCALE_NONE bathy_smoother.LP_tools-module.html#SCALE_NONE -bathy_smoother.LP_tools.PRICER_DEVEX bathy_smoother.LP_tools-module.html#PRICER_DEVEX -bathy_smoother.LP_tools.SCALE_LOGARITHMIC bathy_smoother.LP_tools-module.html#SCALE_LOGARITHMIC -bathy_smoother.LP_tools.PRICER_STEEPESTEDGE bathy_smoother.LP_tools-module.html#PRICER_STEEPESTEDGE -bathy_smoother.LP_tools.BRANCH_FLOOR bathy_smoother.LP_tools-module.html#BRANCH_FLOOR -bathy_smoother.LP_tools.ANTIDEGEN_BOUNDFLIP bathy_smoother.LP_tools-module.html#ANTIDEGEN_BOUNDFLIP -bathy_smoother.LP_tools.SCALE_USERWEIGHT bathy_smoother.LP_tools-module.html#SCALE_USERWEIGHT -bathy_smoother.LP_tools.NEUTRAL bathy_smoother.LP_tools-module.html#NEUTRAL -bathy_smoother.LP_tools.NODE_RANGESELECT bathy_smoother.LP_tools-module.html#NODE_RANGESELECT -bathy_smoother.LP_tools.SCALE_EQUILIBRATE bathy_smoother.LP_tools-module.html#SCALE_EQUILIBRATE -bathy_smoother.LP_tools.PRESOLVE_REDUCEGCD bathy_smoother.LP_tools-module.html#PRESOLVE_REDUCEGCD -bathy_smoother.LP_tools.SCALE_COLSONLY bathy_smoother.LP_tools-module.html#SCALE_COLSONLY -bathy_smoother.LP_tools.NODE_PSEUDONONINTSELECT bathy_smoother.LP_tools-module.html#NODE_PSEUDONONINTSELECT -bathy_smoother.LP_tools.SCALE_POWER2 bathy_smoother.LP_tools-module.html#SCALE_POWER2 -bathy_smoother.LP_tools.NORMAL bathy_smoother.LP_tools-module.html#NORMAL -bathy_smoother.LP_tools.MSG_MILPEQUAL 
bathy_smoother.LP_tools-module.html#MSG_MILPEQUAL -bathy_smoother.LP_tools.ANTIDEGEN_COLUMNCHECK bathy_smoother.LP_tools-module.html#ANTIDEGEN_COLUMNCHECK -bathy_smoother.LP_tools.DEGENERATE bathy_smoother.LP_tools-module.html#DEGENERATE -bathy_smoother.LP_tools.NODE_GAPSELECT bathy_smoother.LP_tools-module.html#NODE_GAPSELECT -bathy_smoother.LP_tools.NODE_PSEUDOCOSTMODE bathy_smoother.LP_tools-module.html#NODE_PSEUDOCOSTMODE -bathy_smoother.LP_tools.NODE_DYNAMICMODE bathy_smoother.LP_tools-module.html#NODE_DYNAMICMODE -bathy_smoother.LP_tools.NODE_PSEUDORATIOSELECT bathy_smoother.LP_tools-module.html#NODE_PSEUDORATIOSELECT -bathy_smoother.LP_tools.NODE_DEPTHFIRSTMODE bathy_smoother.LP_tools-module.html#NODE_DEPTHFIRSTMODE -bathy_smoother.LP_tools.SIMPLEX_DUAL_PRIMAL bathy_smoother.LP_tools-module.html#SIMPLEX_DUAL_PRIMAL -bathy_smoother.LP_tools.SCALE_RANGE bathy_smoother.LP_tools-module.html#SCALE_RANGE -bathy_smoother.LP_tools.NODE_RANDOMIZEMODE bathy_smoother.LP_tools-module.html#NODE_RANDOMIZEMODE -bathy_smoother.LP_tools.NUMFAILURE bathy_smoother.LP_tools-module.html#NUMFAILURE -bathy_smoother.LP_tools.SCALE_ROWSONLY bathy_smoother.LP_tools-module.html#SCALE_ROWSONLY -bathy_smoother.LP_tools.NODE_FRACTIONSELECT bathy_smoother.LP_tools-module.html#NODE_FRACTIONSELECT -bathy_smoother.LP_tools.WriteLinearProgram bathy_smoother.LP_tools-module.html#WriteLinearProgram -bathy_smoother.LP_tools.PRESOLVE_IMPLIEDSLK bathy_smoother.LP_tools-module.html#PRESOLVE_IMPLIEDSLK -bathy_smoother.LP_tools.PRESOLVE_ROWS bathy_smoother.LP_tools-module.html#PRESOLVE_ROWS -bathy_smoother.LP_tools.EQ bathy_smoother.LP_tools-module.html#EQ -bathy_smoother.LP_tools.SCALE_DYNUPDATE bathy_smoother.LP_tools-module.html#SCALE_DYNUPDATE -bathy_smoother.LP_tools.SCALE_GEOMETRIC bathy_smoother.LP_tools-module.html#SCALE_GEOMETRIC -bathy_smoother.LP_tools.MSG_LPOPTIMAL bathy_smoother.LP_tools-module.html#MSG_LPOPTIMAL -bathy_smoother.LP_tools.SCALE_INTEGERS bathy_smoother.LP_tools-module.html#SCALE_INTEGERS -bathy_smoother.LP_tools.MSG_MILPFEASIBLE bathy_smoother.LP_tools-module.html#MSG_MILPFEASIBLE -bathy_smoother.LP_tools.NODE_GREEDYMODE bathy_smoother.LP_tools-module.html#NODE_GREEDYMODE -bathy_smoother.LP_tools.SEVERE bathy_smoother.LP_tools-module.html#SEVERE -bathy_smoother.LP_tools.PRESOLVE_REDUCEMIP bathy_smoother.LP_tools-module.html#PRESOLVE_REDUCEMIP -bathy_smoother.LP_tools.BRANCH_AUTOMATIC bathy_smoother.LP_tools-module.html#BRANCH_AUTOMATIC -bathy_smoother.LP_tools.SCALE_EXTREME bathy_smoother.LP_tools-module.html#SCALE_EXTREME -bathy_smoother.LP_tools.IMPROVE_SOLUTION bathy_smoother.LP_tools-module.html#IMPROVE_SOLUTION -bathy_smoother.LP_tools.DETAILED bathy_smoother.LP_tools-module.html#DETAILED -bathy_smoother.LP_tools.PRESOLVE_PROBEREDUCE bathy_smoother.LP_tools-module.html#PRESOLVE_PROBEREDUCE -bathy_smoother.LP_tools.FR bathy_smoother.LP_tools-module.html#FR -bathy_smoother.LP_tools.PRICE_PARTIAL bathy_smoother.LP_tools-module.html#PRICE_PARTIAL -bathy_smoother.LP_tools.PRICE_LOOPALTERNATE bathy_smoother.LP_tools-module.html#PRICE_LOOPALTERNATE -bathy_smoother.LP_tools.ANTIDEGEN_STALLING bathy_smoother.LP_tools-module.html#ANTIDEGEN_STALLING -bathy_smoother.LP_tools.PRESOLVE_PROBEFIX bathy_smoother.LP_tools-module.html#PRESOLVE_PROBEFIX -bathy_smoother.LP_tools.PRICE_RANDOMIZE bathy_smoother.LP_tools-module.html#PRICE_RANDOMIZE -bathy_smoother.LP_tools.ANTIDEGEN_DYNAMIC bathy_smoother.LP_tools-module.html#ANTIDEGEN_DYNAMIC -bathy_smoother.LP_tools.IMPORTANT 
bathy_smoother.LP_tools-module.html#IMPORTANT -bathy_smoother.LP_tools.SCALE_CURTISREID bathy_smoother.LP_tools-module.html#SCALE_CURTISREID -bathy_smoother.LP_tools.Infinite bathy_smoother.LP_tools-module.html#Infinite -bathy_smoother.LP_tools.PRESOLVE_SOS bathy_smoother.LP_tools-module.html#PRESOLVE_SOS -bathy_smoother.LP_tools.ANTIDEGEN_NONE bathy_smoother.LP_tools-module.html#ANTIDEGEN_NONE -bathy_smoother.LP_tools.NOMEMORY bathy_smoother.LP_tools-module.html#NOMEMORY -bathy_smoother.LP_tools.PRESOLVE_BOUNDS bathy_smoother.LP_tools-module.html#PRESOLVE_BOUNDS -bathy_smoother.LP_tools.PRICE_MULTIPLE bathy_smoother.LP_tools-module.html#PRICE_MULTIPLE -bathy_smoother.LP_tools.PRICE_PRIMALFALLBACK bathy_smoother.LP_tools-module.html#PRICE_PRIMALFALLBACK -bathy_smoother.LP_tools.NODE_BREADTHFIRSTMODE bathy_smoother.LP_tools-module.html#NODE_BREADTHFIRSTMODE -bathy_smoother.LP_tools.NODE_BRANCHREVERSEMODE bathy_smoother.LP_tools-module.html#NODE_BRANCHREVERSEMODE -bathy_smoother.LP_tools.IMPROVE_BBSIMPLEX bathy_smoother.LP_tools-module.html#IMPROVE_BBSIMPLEX -bathy_smoother.LP_tools.SIMPLEX_PRIMAL_DUAL bathy_smoother.LP_tools-module.html#SIMPLEX_PRIMAL_DUAL -bathy_smoother.LP_tools.OPTIMAL bathy_smoother.LP_tools-module.html#OPTIMAL -bathy_smoother.LP_tools.NODE_STRONGINIT bathy_smoother.LP_tools-module.html#NODE_STRONGINIT -bathy_smoother.LP_tools.FEASFOUND bathy_smoother.LP_tools-module.html#FEASFOUND -bathy_smoother.LP_tools.FULL bathy_smoother.LP_tools-module.html#FULL -bathy_smoother.LP_tools.PRESOLVE_ELIMEQ2 bathy_smoother.LP_tools-module.html#PRESOLVE_ELIMEQ2 -bathy_smoother.LP_tools.NODE_WEIGHTREVERSEMODE bathy_smoother.LP_tools-module.html#NODE_WEIGHTREVERSEMODE -bathy_smoother.LP_tools.NODE_PSEUDOCOSTSELECT bathy_smoother.LP_tools-module.html#NODE_PSEUDOCOSTSELECT -bathy_smoother.LP_tools.PROCFAIL bathy_smoother.LP_tools-module.html#PROCFAIL -bathy_smoother.LP_tools.PROCBREAK bathy_smoother.LP_tools-module.html#PROCBREAK -bathy_smoother.LP_tools.TIMEOUT bathy_smoother.LP_tools-module.html#TIMEOUT -bathy_smoother.LP_tools.MSG_LPFEASIBLE bathy_smoother.LP_tools-module.html#MSG_LPFEASIBLE -bathy_smoother.LP_tools.PRICE_ADAPTIVE bathy_smoother.LP_tools-module.html#PRICE_ADAPTIVE -bathy_smoother.LP_tools.PRESOLVE_COLS bathy_smoother.LP_tools-module.html#PRESOLVE_COLS -bathy_smoother.LP_tools.SCALE_MEAN bathy_smoother.LP_tools-module.html#SCALE_MEAN -bathy_smoother.LP_tools.BRANCH_CEILING bathy_smoother.LP_tools-module.html#BRANCH_CEILING -bathy_smoother.LP_tools.ANTIDEGEN_INFEASIBLE bathy_smoother.LP_tools-module.html#ANTIDEGEN_INFEASIBLE -bathy_smoother.LP_tools.NODE_RESTARTMODE bathy_smoother.LP_tools-module.html#NODE_RESTARTMODE -bathy_smoother.LP_tools.LE bathy_smoother.LP_tools-module.html#LE -bathy_smoother.LP_tools.PRICE_LOOPLEFT bathy_smoother.LP_tools-module.html#PRICE_LOOPLEFT -bathy_smoother.LP_tools.PRESOLVE_NONE bathy_smoother.LP_tools-module.html#PRESOLVE_NONE -bathy_smoother.LP_tools.PRICER_DANTZIG bathy_smoother.LP_tools-module.html#PRICER_DANTZIG -bathy_smoother.LP_tools.SIMPLEX_DUAL_DUAL bathy_smoother.LP_tools-module.html#SIMPLEX_DUAL_DUAL -bathy_smoother.LP_tools.SIMPLEX_PRIMAL_PRIMAL bathy_smoother.LP_tools-module.html#SIMPLEX_PRIMAL_PRIMAL -bathy_smoother.LP_tools.CRASH_LEASTDEGENERATE bathy_smoother.LP_tools-module.html#CRASH_LEASTDEGENERATE -bathy_smoother.LP_tools.PRESOLVE_SENSDUALS bathy_smoother.LP_tools-module.html#PRESOLVE_SENSDUALS -bathy_smoother.LP_tools.ANTIDEGEN_FIXEDVARS bathy_smoother.LP_tools-module.html#ANTIDEGEN_FIXEDVARS 
-bathy_smoother.LP_tools.USERABORT bathy_smoother.LP_tools-module.html#USERABORT -bathy_smoother.LP_tools.PRICE_TRUENORMINIT bathy_smoother.LP_tools-module.html#PRICE_TRUENORMINIT -bathy_smoother.LP_tools.PRESOLVE_IMPLIEDFREE bathy_smoother.LP_tools-module.html#PRESOLVE_IMPLIEDFREE -bathy_smoother.LP_tools.PRESOLVE_LINDEP bathy_smoother.LP_tools-module.html#PRESOLVE_LINDEP -bathy_smoother.LP_tools.IMPROVE_NONE bathy_smoother.LP_tools-module.html#IMPROVE_NONE -bathy_smoother.LP_tools.NODE_GUBMODE bathy_smoother.LP_tools-module.html#NODE_GUBMODE -bathy_smoother.LP_tools.NODE_USERSELECT bathy_smoother.LP_tools-module.html#NODE_USERSELECT -bathy_smoother.LP_tools.CRASH_MOSTFEASIBLE bathy_smoother.LP_tools-module.html#CRASH_MOSTFEASIBLE -bathy_smoother.LP_tools.INFEASIBLE bathy_smoother.LP_tools-module.html#INFEASIBLE -bathy_smoother.bathy_smoothing bathy_smoother.bathy_smoothing-module.html -bathy_smoother.bathy_smoothing.smoothing_Positive_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_Positive_rx0 -bathy_smoother.bathy_smoothing.smoothing_PlusMinus_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_PlusMinus_rx0 -bathy_smoother.bathy_smoothing.smoothing_NegativeVolume_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_NegativeVolume_rx0 -bathy_smoother.bathy_smoothing.smoothing_PositiveVolume_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_PositiveVolume_rx0 -bathy_smoother.bathy_smoothing.smoothing_Laplacian_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_Laplacian_rx0 -bathy_smoother.bathy_smoothing.smoothing_Negative_rx0 bathy_smoother.bathy_smoothing-module.html#smoothing_Negative_rx0 -bathy_smoother.bathy_tools bathy_smoother.bathy_tools-module.html -bathy_smoother.bathy_tools.RoughnessMatrix bathy_smoother.bathy_tools-module.html#RoughnessMatrix diff --git a/bathy_smoother/docs/bathy_smoother-module.html b/bathy_smoother/docs/bathy_smoother-module.html deleted file mode 100644 index 901d539..0000000 --- a/bathy_smoother/docs/bathy_smoother-module.html +++ /dev/null @@ -1,141 +0,0 @@ - - - - - bathy_smoother - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- [generated HTML documentation page for Package bathy_smoother: header and navigation]
-ROMS_bathy_smoother is a toolkit for working with ROMS bathymetry
-(ripped from matlab script LP_bathymetry)
-Version: 0.1
-Author: Frederic Castruccio <frederic@marine.rutgers.edu>
- [submodules table and page footer]
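For orientation, here is a minimal usage sketch of the package this deleted page documents; it assumes bathy_smoother has been built and is importable, and the small depth array and mask are made-up placeholders rather than project data:

    import numpy as np
    import bathy_smoother

    # made-up 3x3 depth field (m) and all-sea mask
    Hobs = np.array([[100.0, 80.0, 60.0],
                     [ 90.0, 70.0, 50.0],
                     [ 85.0, 65.0, 45.0]])
    MSK = np.ones(Hobs.shape)

    # rx0-style roughness of the raw bathymetry
    RMat = bathy_smoother.bathy_tools.RoughnessMatrix(Hobs, MSK)
    print('max roughness =', RMat.max())

    # direct iterative smoothing toward a target rx0 of 0.2
    Hsmooth = bathy_smoother.bathy_smoothing.smoothing_Positive_rx0(MSK, Hobs, 0.2)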
- - - - diff --git a/bathy_smoother/docs/bathy_smoother-pysrc.html b/bathy_smoother/docs/bathy_smoother-pysrc.html deleted file mode 100644 index c11e609..0000000 --- a/bathy_smoother/docs/bathy_smoother-pysrc.html +++ /dev/null @@ -1,125 +0,0 @@ - - - - - bathy_smoother - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- [generated HTML source page: header and navigation]
-Source Code for Package bathy_smoother
- 1  # encoding: utf-8 
- 2  '''  
- 3  ROMS_bathy_smoother is a toolkit for working with ROMS bathymetry 
- 4  (ripped from matlab script LP_bathymetry) 
- 5  ''' 
- 6   
- 7  import bathy_smoothing 
- 8  import bathy_tools 
- 9  import LP_bathy_smoothing 
-10  import LP_bathy_tools 
-11  import LP_tools 
-12   
-13  __authors__ = ['Frederic Castruccio <frederic@marine.rutgers.edu>'] 
-14   
-15  __version__ = '0.1' 
-16   
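The listing above keeps the old Python 2 idioms; the hunks earlier in this diff port them to Python 3. A tiny self-contained sketch of the conversion pattern (nbModif is a placeholder value):

    nbModif = 3                                   # placeholder

    # Python 2: print ' nbModif=', nbModif
    print(' nbModif=', nbModif)                   # Python 3: print is a function

    try:
        # Python 2: raise ValueError, 'Feasibility test failed. testfeasibility = 0.'
        raise ValueError('Feasibility test failed. testfeasibility = 0.')
    except ValueError as err:
        print(err)

    # Python 2, inside the package:  import bathy_tools
    # Python 3, inside the package:  from . import bathy_tools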
- - - - diff --git a/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-module.html b/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-module.html deleted file mode 100644 index 4340bac..0000000 --- a/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-module.html +++ /dev/null @@ -1,287 +0,0 @@ - - - - - bathy_smoother.LP_bathy_smoothing - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- [generated HTML documentation page for Module LP_bathy_smoothing: header and navigation]
-Functions:
-LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst)
-    This program performs a linear programming method in order to optimize the bathymetry for a fixed factor r.
-LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst)
-    This program performs a linear programming method in order to optimize the bathymetry for a fixed factor r.
-Function Details:
-LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst)
-This program performs a linear programming method in order to
-optimize the bathymetry for a fixed factor r.
-The inequality |H(e)-H(e')| / (H(e)+H(e')) <= r, where H(e)=h(e)+dh(e),
-can be rewritten as two linear inequalities on dh(e) and dh(e').
-The optimal bathymetry is obtained by minimising the perturbation
-P = sum_e(|dh(e)|) under the above inequality constraints.
-
-Usage:
-NewBathy = LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst)
-
----MSK(eta_rho,xi_rho) is the mask of the grd
-     1 for sea
-     0 for land
----Hobs(eta_rho,xi_rho) is the raw depth of the grid
----rx0max is the target rx0 roughness factor
----SignConst(eta_rho,xi_rho) matrix of 0, +1, -1
-     +1  only bathymetry increase are allowed.
-     -1  only bathymetry decrease are allowed.
-     0   increase and decrease are allowed.
-     (put 0 if you are indifferent)
----AmpConst(eta_rho,xi_rho)  matrix of reals.
-     coefficient alpha such that the new bathymetry should
-     satisfy to  |h^{new} - h^{raw}| <= alpha h^{raw}
-     (put 10000 if you are indifferent)
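A hedged usage sketch following the description above; it assumes lpsolve55 is installed (LP_tools warns that the linear programming method is unavailable without it), and the depth values are placeholders:

    import numpy as np
    from bathy_smoother import LP_bathy_smoothing

    Hobs = np.array([[120.0, 60.0, 30.0],
                     [110.0, 55.0, 28.0],
                     [105.0, 50.0, 25.0]])       # raw depths (placeholder)
    MSK = np.ones(Hobs.shape)                    # 1 = sea, 0 = land
    rx0max = 0.2                                 # target roughness factor

    SignConst = np.zeros(Hobs.shape)             # 0: increase and decrease both allowed
    AmpConst = 10000.0 * np.ones(Hobs.shape)     # 10000: no amplitude constraint

    NewBathy = LP_bathy_smoothing.LP_smoothing_rx0(MSK, Hobs, rx0max,
                                                   SignConst, AmpConst)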
-LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst)
-This program performs a linear programming method in order to
-optimize the bathymetry for a fixed factor r.
-The inequality |H(e)-H(e')| / (H(e)+H(e')) <= r, where H(e)=h(e)+dh(e),
-can be rewritten as two linear inequalities on dh(e) and dh(e').
-The optimal bathymetry is obtained by minimising the perturbation
-P = sum_e(|dh(e)|) under the above inequality constraints.
-In order to reduce the computation time, a heuristic method is
-used.
-
-Usage:
-NewBathy = LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst)
-
----MSK(eta_rho,xi_rho) is the mask of the grd
-     1 for sea
-     0 for land
----Hobs(eta_rho,xi_rho) is the raw depth of the grid
----rx0max is the target rx0 roughness factor
----SignConst(eta_rho,xi_rho) matrix of 0, +1, -1
-     +1  only bathymetry increase are allowed.
-     -1  only bathymetry decrease are allowed.
-     0   increase and decrease are allowed.
-     (put 0 if you are indifferent)
----AmpConst(eta_rho,xi_rho)  matrix of reals.
-     coefficient alpha such that the new bathymetry should
-     satisfy to  |h^{new} - h^{raw}| <= alpha h^{raw}
-     (put 10000 if you are indifferent)
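The heuristic variant takes the same arguments; the only extra work is building non-trivial SignConst and AmpConst matrices when you are not indifferent. An illustrative sketch (grid size and values are placeholders):

    import numpy as np

    eta_rho, xi_rho = 3, 3                        # placeholder grid size
    SignConst = np.zeros((eta_rho, xi_rho))       # default: free to move either way
    SignConst[0, :] = +1                          # first row: only increases allowed

    AmpConst = 10000.0 * np.ones((eta_rho, xi_rho))   # default: unconstrained
    AmpConst[0, :] = 0.2                          # first row: |h_new - h_raw| <= 0.2 h_raw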
- - - - diff --git a/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-pysrc.html b/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-pysrc.html deleted file mode 100644 index f626bd6..0000000 --- a/bathy_smoother/docs/bathy_smoother.LP_bathy_smoothing-pysrc.html +++ /dev/null @@ -1,278 +0,0 @@ - - - - - bathy_smoother.LP_bathy_smoothing - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- [generated HTML source page: header and navigation]
Source Code for Module bathy_smoother.LP_bathy_smoothing

-
-  1  import numpy as np 
-  2  from ROMS_bathy_smoother import LP_bathy_tools 
-  3  from ROMS_bathy_smoother import LP_tools 
-  4  from ROMS_bathy_smoother import bathy_tools 
-  5   
-  6  import matplotlib.pyplot as plt 
-  7   
-  8   
-
9 -def LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst): -
10 """ - 11 This program perform a linear programming method in order to - 12 optimize the bathymetry for a fixed factor r. - 13 The inequality |H(e)-H(e')| / (H(e)-H(e')) <= r where H(e)=h(e)+dh(e) - 14 can be rewritten as two linear inequalities on dh(e) and dh(e'). - 15 The optimal bathymetry is obtain by minimising the perturbation - 16 P = sum_e(|dh(e)| under the above inequalitie constraintes. - 17 - 18 Usage: - 19 NewBathy = LP_smoothing_rx0(MSK, Hobs, rx0max, SignConst, AmpConst) - 20 - 21 ---MSK(eta_rho,xi_rho) is the mask of the grd - 22 1 for sea - 23 0 for land - 24 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid - 25 ---rx0max is the target rx0 roughness factor - 26 ---SignConst(eta_rho,xi_rho) matrix of 0, +1, -1 - 27 +1 only bathymetry increase are allowed. - 28 -1 only bathymetry decrease are allowed. - 29 0 increase and decrease are allowed. - 30 (put 0 if you are indifferent) - 31 ---AmpConst(eta_rho,xi_rho) matrix of reals. - 32 coefficient alpha such that the new bathymetry should - 33 satisfy to |h^{new} - h^{raw}| <= alpha h^{raw} - 34 (put 10000 if you are indifferent) - 35 """ - 36 - 37 eta_rho, xi_rho = MSK.shape - 38 - 39 iList, jList, sList, Constant = LP_bathy_tools.GetIJS_rx0(MSK, Hobs, rx0max) - 40 - 41 iListApp, jListApp, sListApp, ConstantApp = LP_bathy_tools.GetIJS_maxamp(MSK, Hobs, AmpConst) - 42 - 43 iList, jList, sList, Constant = LP_bathy_tools.MergeIJS_listings(iList, jList, sList, Constant, iListApp, jListApp, sListApp, ConstantApp) - 44 - 45 iListApp, jListApp, sListApp, ConstantApp = LP_bathy_tools.GetIJS_signs(MSK, SignConst) - 46 - 47 iList, jList, sList, Constant = LP_bathy_tools.MergeIJS_listings(iList, jList, sList, Constant, iListApp, jListApp, sListApp, ConstantApp) - 48 - 49 TotalNbVert = int(MSK.sum()) - 50 - 51 ObjectiveFct = np.zeros((2*TotalNbVert,1)) - 52 for iVert in range(TotalNbVert): - 53 ObjectiveFct[TotalNbVert+iVert,0] = 1 - 54 - 55 ValueFct, ValueVar, testfeasibility = LP_tools.SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct) - 56 if (testfeasibility == 0): - 57 NewBathy = NaN * np.ones((eta_rho,xi_rho)) - 58 raise ValueError, 'Feasibility test failed. testfeasibility = 0.' - 59 - 60 correctionBathy = np.zeros((eta_rho,xi_rho)) - 61 nbVert = 0 - 62 for iEta in range(eta_rho): - 63 for iXi in range(xi_rho): - 64 if (MSK[iEta,iXi] == 1): - 65 correctionBathy[iEta,iXi] = ValueVar[nbVert] - 66 nbVert = nbVert + 1 - 67 - 68 NewBathy = Hobs + correctionBathy - 69 RMat = bathy_tools.RoughnessMatrix(NewBathy, MSK) - 70 MaxRx0 = RMat.max() - 71 print 'rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0 - 72 - 73 return NewBathy -
74 - 75 - 76 - 77 - 78 -
79 -def LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst): -
80 """ - 81 This program perform a linear programming method in order to - 82 optimize the bathymetry for a fixed factor r. - 83 The inequality |H(e)-H(e')| / (H(e)-H(e')) <= r where H(e)=h(e)+dh(e) - 84 can be rewritten as two linear inequalities on dh(e) and dh(e'). - 85 The optimal bathymetry is obtain by minimising the perturbation - 86 P = sum_e(|dh(e)| under the above inequalitie constraintes. - 87 In order to reduce the computation time, an heurastic method is - 88 used. - 89 - 90 Usage: - 91 NewBathy = LP_smoothing_rx0_heuristic(MSK, Hobs, rx0max, SignConst, AmpConst) - 92 - 93 ---MSK(eta_rho,xi_rho) is the mask of the grd - 94 1 for sea - 95 0 for land - 96 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid - 97 ---rx0max is the target rx0 roughness factor - 98 ---SignConst(eta_rho,xi_rho) matrix of 0, +1, -1 - 99 +1 only bathymetry increase are allowed. -100 -1 only bathymetry decrease are allowed. -101 0 increase and decrease are allowed. -102 (put 0 if you are indifferent) -103 ---AmpConst(eta_rho,xi_rho) matrix of reals. -104 coefficient alpha such that the new bathymetry should -105 satisfy to |h^{new} - h^{raw}| <= alpha h^{raw} -106 (put 10000 if you are indifferent) -107 """ -108 -109 -110 # the points that need to be modified -111 MSKbad = LP_bathy_tools.GetBadPoints(MSK, Hobs, rx0max) -112 -113 eta_rho, xi_rho = MSK.shape -114 -115 Kdist = 5 -116 -117 Kbad = np.where(MSKbad == 1) -118 nbKbad = np.size(Kbad,1) -119 ListIdx = np.zeros((eta_rho,xi_rho), dtype=np.int) -120 ListIdx[Kbad] = range(nbKbad) -121 -122 ListEdges = [] -123 nbEdge = 0 -124 for iK in range(nbKbad): -125 iEta, iXi = Kbad[0][iK], Kbad[1][iK] -126 ListNeigh = LP_bathy_tools.Neighborhood(MSK, iEta, iXi, 2*Kdist+1) -127 nbNeigh = np.size(ListNeigh, 0) -128 for iNeigh in range(nbNeigh): -129 iEtaN, iXiN = ListNeigh[iNeigh] -130 if (MSKbad[iEtaN,iXiN] == 1): -131 idx = ListIdx[iEtaN,iXiN] -132 if (idx > iK): -133 nbEdge = nbEdge + 1 -134 ListEdges.append([iK, idx]) -135 -136 ListEdges = np.array(ListEdges) -137 ListVertexStatus = LP_bathy_tools.ConnectedComponent(ListEdges, nbKbad) -138 nbColor = ListVertexStatus.max() -139 -140 NewBathy = Hobs.copy() -141 for iColor in range(1,nbColor+1): -142 print '---------------------------------------------------------------' -143 MSKcolor = np.zeros((eta_rho, xi_rho)) -144 K = np.where(ListVertexStatus == iColor) -145 nbK = np.size(K,1) -146 print 'iColor = ', iColor, ' nbK = ', nbK -147 for iVertex in range(nbKbad): -148 if (ListVertexStatus[iVertex,0] == iColor): -149 iEta, iXi = Kbad[0][iVertex], Kbad[1][iVertex] -150 MSKcolor[iEta, iXi] = 1 -151 ListNeigh = LP_bathy_tools.Neighborhood(MSK, iEta, iXi, Kdist) -152 nbNeigh = np.size(ListNeigh, 0) -153 for iNeigh in range(nbNeigh): -154 iEtaN, iXiN = ListNeigh[iNeigh] -155 MSKcolor[iEtaN,iXiN] = 1 -156 K = np.where(MSKcolor == 1) -157 MSKHobs = np.zeros((eta_rho, xi_rho)) -158 MSKHobs[K] = Hobs[K].copy() -159 TheNewBathy = LP_smoothing_rx0(MSKcolor, MSKHobs, rx0max, SignConst, AmpConst) -160 NewBathy[K] = TheNewBathy[K].copy() -161 -162 print 'Final obtained bathymetry' -163 RMat = bathy_tools.RoughnessMatrix(NewBathy, MSK) -164 MaxRx0 = RMat.max() -165 print 'rx0max = ', rx0max, ' MaxRx0 = ', MaxRx0 -166 -167 return NewBathy -
diff --git a/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-module.html b/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-module.html
deleted file mode 100644
index 02a1948..0000000
--- a/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-module.html
+++ /dev/null
@@ -1,254 +0,0 @@
-Module LP_bathy_tools -- epydoc summary page listing the module's functions:
-    GetIJS_rx0(MSK, DEP, r)
-    GetIJS_maxamp(MSK, DEP, AmpConst)
-    GetIJS_signs(MSK, SignConst)
-    MergeIJS_listings(iList1, jList1, sList1, Constant1,
-                      iList2, jList2, sList2, Constant2)
-    GetBadPoints(MSK, DEP, rx0max)
-    Neighborhood(MSK, iEta, iXi, Kdist)
-    ConnectedComponent(ListEdges, nbVert)
-        compute the vector of connected component belonging using a
-        representation and an algorithm well suited for sparse graphs.
diff --git a/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-pysrc.html b/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-pysrc.html
deleted file mode 100644
index 4105c0e..0000000
--- a/bathy_smoother/docs/bathy_smoother.LP_bathy_tools-pysrc.html
+++ /dev/null
@@ -1,541 +0,0 @@
-Source Code for Module bathy_smoother.LP_bathy_tools
-  1  import numpy as np 
-  2  from ROMS_bathy_smoother import bathy_smoothing 
-  3   
-
4 -def GetIJS_rx0(MSK, DEP, r): -
5 - 6 eta_rho, xi_rho = DEP.shape - 7 print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho - 8 - 9 nbVert = 0 - 10 ListCoord = np.zeros((eta_rho, xi_rho)) - 11 for iEta in range(eta_rho): - 12 for iXi in range(xi_rho): - 13 if (MSK[iEta,iXi] == 1): - 14 nbVert = nbVert + 1 - 15 ListCoord[iEta,iXi] = nbVert - 16 - 17 TotalNbVert = nbVert - 18 print 'ListCoord built' - 19 print 'Computing inequalities for r = ', r - 20 - 21 TotalNbConstant = 0 - 22 TotalNbEntry = 0 - 23 for iEta in range(eta_rho-1): - 24 for iXi in range(xi_rho): - 25 if (MSK[iEta,iXi] == 1 and MSK[iEta+1,iXi] == 1): - 26 TotalNbConstant = TotalNbConstant + 2 - 27 TotalNbEntry = TotalNbEntry + 4 - 28 - 29 for iEta in range(eta_rho): - 30 for iXi in range(xi_rho-1): - 31 if (MSK[iEta,iXi] == 1 and MSK[iEta,iXi+1] == 1): - 32 TotalNbConstant = TotalNbConstant + 2 - 33 TotalNbEntry = TotalNbEntry + 4 - 34 - 35 TotalNbConstant = TotalNbConstant + 2 * TotalNbVert - 36 TotalNbEntry = TotalNbEntry + 4 * TotalNbVert - 37 - 38 Constant = np.zeros((TotalNbConstant,1)) - 39 iList = np.zeros((TotalNbEntry,1)) - 40 jList=np.zeros((TotalNbEntry,1)) - 41 sList=np.zeros((TotalNbEntry,1)) - 42 - 43 nbConst=0; - 44 nbEntry=0; - 45 for iEta in range(eta_rho-1): - 46 for iXi in range(xi_rho): - 47 if (MSK[iEta,iXi] == 1 and MSK[iEta+1,iXi] == 1): - 48 idx1 = ListCoord[iEta,iXi] - 49 idx2 = ListCoord[iEta+1,iXi] - 50 - 51 CST = (1+r) * DEP[iEta+1,iXi] + (-1+r) * DEP[iEta,iXi] - 52 Constant[nbConst,0] = CST - 53 nbConst = nbConst + 1 - 54 iList[nbEntry,0] = nbConst - 55 jList[nbEntry,0] = idx2 - 56 sList[nbEntry,0] = -1-r - 57 nbEntry = nbEntry + 1 - 58 iList[nbEntry,0] = nbConst - 59 jList[nbEntry,0] = idx1 - 60 sList[nbEntry,0] = 1-r - 61 nbEntry = nbEntry + 1 - 62 - 63 CST = (1+r) * DEP[iEta,iXi] + (-1+r) * DEP[iEta+1,iXi] - 64 Constant[nbConst,0] = CST - 65 nbConst = nbConst + 1 - 66 iList[nbEntry,0] = nbConst - 67 jList[nbEntry,0] = idx1 - 68 sList[nbEntry,0] = -r-1 - 69 nbEntry = nbEntry + 1 - 70 iList[nbEntry,0] = nbConst - 71 jList[nbEntry,0] = idx2 - 72 sList[nbEntry,0] = 1-r - 73 nbEntry = nbEntry + 1 - 74 - 75 print 'Inequalities for dh(iEta,iXi) and dh(iEta+1,iXi)' - 76 - 77 for iEta in range(eta_rho): - 78 for iXi in range(xi_rho-1): - 79 if (MSK[iEta,iXi] == 1 and MSK[iEta, iXi+1] == 1): - 80 idx1 = ListCoord[iEta,iXi] - 81 idx2 = ListCoord[iEta,iXi+1] - 82 - 83 CST = (1+r) * DEP[iEta,iXi+1] + (r-1) * DEP[iEta,iXi] - 84 Constant[nbConst,0] = CST - 85 nbConst = nbConst + 1 - 86 iList[nbEntry,0] = nbConst - 87 jList[nbEntry,0] = idx2 - 88 sList[nbEntry,0] = -r-1 - 89 nbEntry = nbEntry + 1 - 90 iList[nbEntry,0] = nbConst - 91 jList[nbEntry,0] = idx1 - 92 sList[nbEntry,0] = 1-r - 93 nbEntry = nbEntry + 1 - 94 - 95 CST = (1+r) * DEP[iEta,iXi] + (r-1) * DEP[iEta,iXi+1] - 96 Constant[nbConst,0] = CST - 97 nbConst = nbConst + 1 - 98 iList[nbEntry,0] = nbConst - 99 jList[nbEntry,0] = idx1 -100 sList[nbEntry,0] = -r-1 -101 nbEntry = nbEntry + 1 -102 iList[nbEntry,0] = nbConst -103 jList[nbEntry,0] = idx2 -104 sList[nbEntry,0] = 1-r -105 nbEntry = nbEntry + 1 -106 -107 print 'Inequalities for dh(iEta,iXi) and dh(iEta,iXi+1)' -108 -109 for iEta in range(eta_rho): -110 for iXi in range(xi_rho): -111 if (MSK[iEta,iXi] == 1): -112 idx = ListCoord[iEta,iXi] -113 -114 Constant[nbConst,0] = 0 -115 nbConst = nbConst + 1 -116 iList[nbEntry,0] = nbConst -117 jList[nbEntry,0] = TotalNbVert + idx -118 sList[nbEntry,0] = -1 -119 nbEntry = nbEntry + 1 -120 iList[nbEntry,0] = nbConst -121 jList[nbEntry,0] = idx -122 sList[nbEntry,0] = 1 -123 nbEntry = nbEntry + 
1 -124 -125 Constant[nbConst,0] = 0 -126 nbConst = nbConst + 1 -127 iList[nbEntry,0] = nbConst -128 jList[nbEntry,0] = TotalNbVert+idx -129 sList[nbEntry,0] = -1 -130 nbEntry = nbEntry + 1 -131 iList[nbEntry,0] = nbConst -132 jList[nbEntry,0] = idx -133 sList[nbEntry,0] = -1 -134 nbEntry = nbEntry + 1 -135 -136 print 'Inequalities dh <= ad and -dh <= ad' -137 -138 print 'rx0: nbEntry = ', nbEntry, ' nbConst = ', nbConst -139 print ' ' -140 -141 if (abs(nbEntry - TotalNbEntry) > 0): -142 raise ValueError, 'We have a coding inconsistency for nbEntry. Please correct' -143 -144 if (abs(nbConst - TotalNbConstant) > 0): -145 raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' -146 -147 -148 return iList, jList, sList, Constant -
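The four arrays returned above are a 1-based sparse triplet description of the constraint system A*x <= b; column indices above TotalNbVert refer to the auxiliary |dh| variables. A hedged sketch of turning them into a SciPy sparse matrix, assuming scipy is available (this conversion is not part of the package):

    import numpy as np
    from scipy.sparse import coo_matrix

    def triplets_to_matrix(iList, jList, sList, Constant, nbCols):
        rows = iList[:, 0].astype(int) - 1      # the triplets are 1-based
        cols = jList[:, 0].astype(int) - 1
        vals = sList[:, 0]
        A = coo_matrix((vals, (rows, cols)), shape=(Constant.shape[0], nbCols))
        b = Constant[:, 0]
        return A.tocsr(), b

For LP_smoothing_rx0 the number of columns is 2*TotalNbVert: one dh variable and one auxiliary variable per wet cell.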
149 -150 -151 -
152 -def GetIJS_maxamp(MSK, DEP, AmpConst): -
153 -154 -155 eta_rho, xi_rho = DEP.shape -156 print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho -157 -158 nbVert = 0 -159 ListCoord = np.zeros((eta_rho, xi_rho)) -160 for iEta in range(eta_rho): -161 for iXi in range(xi_rho): -162 if (MSK[iEta,iXi] == 1): -163 nbVert = nbVert + 1 -164 ListCoord[iEta,iXi] = nbVert -165 -166 TotalNbConstant = 0 -167 TotalNbEntry = 0 -168 for iEta in range(eta_rho): -169 for iXi in range(xi_rho): -170 if (MSK[iEta,iXi] == 1): -171 alpha = AmpConst[iEta,iXi] -172 if (alpha < 9999): -173 TotalNbConstant = TotalNbConstant + 2 -174 TotalNbEntry = TotalNbEntry + 2 -175 -176 nbConst = 0 -177 nbEntry = 0 -178 Constant = np.zeros((TotalNbConstant,1)) -179 iList = np.zeros((TotalNbEntry,1)) -180 jList = np.zeros((TotalNbEntry,1)) -181 sList = np.zeros((TotalNbEntry,1)) -182 -183 for iEta in range(eta_rho): -184 for iXi in range(xi_rho): -185 if (MSK[iEta,iXi] == 1): -186 idx = ListCoord[iEta,iXi] -187 alpha = AmpConst[iEta,iXi] -188 -189 if (alpha < 9999): -190 Constant[nbConst,0] = alpha * DEP[iEta,iXi] -191 iList[nbEntry,0] = nbConst + 1 -192 jList[nbEntry,0] = idx -193 sList[nbEntry,0] = -1 -194 nbConst = nbConst + 1 -195 nbEntry = nbEntry + 1 -196 -197 Constant[nbConst,0] = alpha * DEP[iEta,iXi] -198 iList[nbEntry,0] = nbConst + 1 -199 jList[nbEntry,0] = idx -200 sList[nbEntry,0] = 1 -201 nbConst = nbConst + 1 -202 nbEntry = nbEntry + 1 -203 -204 print 'Inequalities |h^{new} - h^{old}| <= alpha h^{old}' -205 print 'maxamp: nbEntry = ', nbEntry, ' nbConst = ', nbConst -206 print ' ' -207 -208 if (abs(nbEntry - TotalNbEntry) > 0): -209 raise ValueError, 'We have a coding inconsistency for nbEntry. Please correct' -210 -211 if (abs(nbConst - TotalNbConstant) > 0): -212 raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' -213 -214 -215 return iList, jList, sList, Constant -
216 -217 -218 -
219 -def GetIJS_signs(MSK, SignConst): -
220 -221 eta_rho, xi_rho = MSK.shape -222 print 'eta_rho = ', eta_rho, ' xi_rho = ', xi_rho -223 -224 nbVert = 0 -225 ListCoord = np.zeros((eta_rho, xi_rho)) -226 for iEta in range(eta_rho): -227 for iXi in range(xi_rho): -228 if (MSK[iEta,iXi] == 1): -229 nbVert = nbVert + 1 -230 ListCoord[iEta,iXi] = nbVert -231 -232 TotalNbConstant = 0 -233 TotalNbEntry = 0 -234 for iEta in range(eta_rho): -235 for iXi in range(xi_rho): -236 if (MSK[iEta,iXi] == 1 and SignConst[iEta,iXi] != 0): -237 TotalNbConstant = TotalNbConstant + 1 -238 TotalNbEntry = TotalNbEntry + 1 -239 -240 nbConst = 0 -241 nbEntry = 0 -242 Constant = np.zeros((TotalNbConstant,1)) -243 iList = np.zeros((TotalNbEntry,1)) -244 jList = np.zeros((TotalNbEntry,1)) -245 sList = np.zeros((TotalNbEntry,1)) -246 -247 -248 for iEta in range(eta_rho): -249 for iXi in range(xi_rho): -250 if (MSK[iEta,iXi] == 1 and SignConst[iEta,iXi] != 0): -251 idx = ListCoord[iEta,iXi] -252 -253 Constant[nbConst,0] = 0 -254 nbConst = nbConst + 1 -255 iList[nbEntry,0] = nbConst -256 jList[nbEntry,0] = idx -257 if (SignConst[iEta,iXi] == 1): -258 sList[nbEntry,0] = -1 -259 elif (SignConst[iEta, iXi] == -1): -260 sList[nbEntry,0] = 1 -261 else: -262 raise ValueError, 'Wrong assigning please check SignConst' -263 nbEntry = nbEntry + 1 -264 -265 print 'Inequalities dh >= 0 or dh <= 0' -266 print 'signs: nbEntry = ', nbEntry, ' nbConst = ', nbConst -267 print ' ' -268 -269 if (abs(nbEntry - TotalNbEntry) > 0): -270 raise ValueError, 'We have a coding inconsistency for nbEntry. Please correct' -271 -272 if (abs(nbConst - TotalNbConstant) > 0): -273 raise ValueError, 'We have a coding inconsistency for nbConst. Please correct' -274 -275 -276 return iList, jList, sList, Constant -
277 -278 -279 -
280 -def MergeIJS_listings(iList1, jList1, sList1, Constant1, iList2, jList2, sList2, Constant2): -
281 -282 # Suppose we have two sets of inequalities for two linear programs -283 # with the same set of variables presented in sparse form. -284 # The two descriptions are merge. -285 -286 nbConst1 = Constant1.shape[0] -287 nbConst2 = Constant2.shape[0] -288 nbEnt1 = iList1.shape[0] -289 nbEnt2 = iList2.shape[0] -290 -291 Constant = np.zeros((nbConst1+nbConst2,1)) -292 iList = np.zeros((nbEnt1+nbEnt2,1)) -293 jList = np.zeros((nbEnt1+nbEnt2,1)) -294 sList = np.zeros((nbEnt1+nbEnt2,1)) -295 -296 for iCons in range(nbConst1): -297 Constant[iCons,0] = Constant1[iCons,0] -298 -299 for iCons in range(nbConst2): -300 Constant[nbConst1+iCons,0] = Constant2[iCons,0] -301 -302 for iEnt in range(nbEnt1): -303 iList[iEnt,0] = iList1[iEnt,0] -304 jList[iEnt,0] = jList1[iEnt,0] -305 sList[iEnt,0] = sList1[iEnt,0] -306 -307 for iEnt in range(nbEnt2): -308 iList[nbEnt1+iEnt,0] = nbConst1 + iList2[iEnt,0] -309 jList[nbEnt1+iEnt,0] = jList2[iEnt,0] -310 sList[nbEnt1+iEnt,0] = sList2[iEnt,0] -311 -312 return iList, jList, sList, Constant -
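MergeIJS_listings simply stacks the two constraint blocks, offsetting the row indices of the second block by the number of constraints in the first. An equivalent vectorised sketch using numpy (illustrative only):

    import numpy as np

    def merge_ijs(iList1, jList1, sList1, Constant1,
                  iList2, jList2, sList2, Constant2):
        nbConst1 = Constant1.shape[0]
        Constant = np.vstack((Constant1, Constant2))
        iList = np.vstack((iList1, iList2 + nbConst1))   # shift block-2 row indices
        jList = np.vstack((jList1, jList2))
        sList = np.vstack((sList1, sList2))
        return iList, jList, sList, Constant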
313 -314 -
315 -def GetBadPoints(MSK, DEP, rx0max): -
316 -317 RetBathy = bathy_smoothing.smoothing_Positive_rx0(MSK, DEP, rx0max) -318 K1 = np.where(RetBathy != DEP) -319 -320 eta_rho, xi_rho = MSK.shape -321 MSKbad = np.zeros((eta_rho,xi_rho)) -322 MSKbad[K1] = 1 -323 -324 return MSKbad -
325 -326 -
327 -def Neighborhood(MSK, iEta, iXi, Kdist): -
328 -329 eta_rho, xi_rho = MSK.shape -330 MaxSiz = (2 * Kdist + 1) * (2 * Kdist + 1) -331 ListNeigh = np.zeros((MaxSiz,2), dtype=np.int) -332 ListStatus = -1 * np.ones((MaxSiz,1), dtype=np.int) -333 ListKeys = np.zeros((MaxSiz,1), dtype=np.int) -334 -335 eKey = iEta + (eta_rho+1) * iXi -336 ListNeigh[0,0] = iEta -337 ListNeigh[0,1] = iXi -338 ListStatus[0,0] = 0 -339 ListKeys[0,0] = eKey -340 nbPt = 1 -341 -342 List4dir = np.array([[1, 0], -343 [0, 1], -344 [-1, 0], -345 [0, -1]]) -346 -347 for iK in range(1,Kdist+1): -348 nbPtOld = nbPt -349 for iPt in range(nbPtOld): -350 if (ListStatus[iPt,0] == iK-1): -351 iEta = ListNeigh[iPt,0] -352 iXi = ListNeigh[iPt,1] -353 for ineigh in range(4): -354 iEtaN = iEta + List4dir[ineigh,0] -355 iXiN = iXi + List4dir[ineigh,1] -356 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -357 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -358 eKeyN = iEtaN + (eta_rho+1)*iXiN -359 Kf = np.where(ListKeys == eKeyN) -360 nbKf = np.size(Kf,1) -361 if (nbKf == 0): -362 ListNeigh[nbPt,0] = iEtaN -363 ListNeigh[nbPt,1] = iXiN -364 ListStatus[nbPt,0] = iK -365 ListKeys[nbPt,0] = eKeyN -366 nbPt = nbPt + 1 -367 -368 ListNeighRet = ListNeigh[1:nbPt,:] -369 -370 return ListNeighRet -
371 -372 -
373 -def ConnectedComponent(ListEdges, nbVert): -
374 """ -375 compute the vector of connected component belonging -376 using a representation and an algorithm well suited -377 for sparse graphs. -378 """ -379 -380 nbEdge = np.size(ListEdges, 0) -381 ListDegree = np.zeros((nbVert,1), dtype=np.int) -382 ListAdjacency = np.zeros((nbVert,10000), dtype=np.int) -383 -384 for iEdge in range(nbEdge): -385 eVert = ListEdges[iEdge,0] -386 fVert = ListEdges[iEdge,1] -387 eDeg = ListDegree[eVert,0] + 1 -388 fDeg = ListDegree[fVert,0] + 1 -389 ListDegree[eVert,0] = eDeg -390 ListDegree[fVert,0] = fDeg -391 ListAdjacency[eVert,eDeg-1] = fVert -392 ListAdjacency[fVert,fDeg-1] = eVert -393 -394 -395 MaxDeg = ListDegree.max() -396 ListAdjacency = ListAdjacency[:,:MaxDeg] -397 -398 ListVertexStatus = np.zeros((nbVert,1)) -399 ListHot = np.zeros((nbVert,1)) -400 ListNotDone = np.ones((nbVert,1)) -401 -402 iComp = 0 -403 while(1): -404 H = np.where(ListNotDone == 1) -405 nb = np.size(H, 1) -406 if (nb == 0): -407 break; -408 -409 iComp = iComp + 1 -410 ListVertexStatus[H[0][0],0] = iComp -411 ListHot[H[0][0],0] = 1 -412 while(1): -413 H = np.where(ListHot == 1) -414 ListNotDone[H] = 0 -415 ListNewHot = np.zeros((nbVert,1)) -416 for iH in range(np.size(H, 1)): -417 eVert = H[0][iH] -418 for iV in range(ListDegree[eVert, 0]): -419 ListNewHot[ListAdjacency[eVert, iV],0] = 1 -420 -421 ListHot = ListNotDone * ListNewHot -422 SumH = sum(ListHot) -423 if (SumH == 0): -424 break -425 -426 H2 = np.where(ListHot == 1) -427 ListVertexStatus[H2] = iComp -428 -429 -430 return ListVertexStatus -
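ConnectedComponent labels each bad point with the index (starting at 1) of the connected cluster it belongs to. If SciPy is available, an equivalent labelling (component numbering may differ) can be obtained from scipy.sparse.csgraph, which may be noticeably faster on large grids; a hedged sketch, not part of the package:

    import numpy as np
    from scipy.sparse import coo_matrix
    from scipy.sparse.csgraph import connected_components

    def connected_component_scipy(ListEdges, nbVert):
        data = np.ones(len(ListEdges))
        adj = coo_matrix((data, (ListEdges[:, 0], ListEdges[:, 1])),
                         shape=(nbVert, nbVert))
        n_comp, labels = connected_components(adj, directed=False)
        return labels.reshape(nbVert, 1) + 1    # shift to 1-based like the original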
diff --git a/bathy_smoother/docs/bathy_smoother.LP_tools-module.html b/bathy_smoother/docs/bathy_smoother.LP_tools-module.html
deleted file mode 100644
index 3eaaa10..0000000
--- a/bathy_smoother/docs/bathy_smoother.LP_tools-module.html
+++ /dev/null
@@ -1,1070 +0,0 @@
-Module LP_tools -- epydoc summary page listing the module's functions:
-    WriteLinearProgram(FileName, iList, jList, sList, Constant, ObjectiveFct)
-    SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct)
-Variables -- the page also listed the lpsolve55 solver constants re-exported by
-    the module through "from lpsolve55 import *" (ANTIDEGEN_*, BRANCH_*,
-    CRASH_*, IMPROVE_*, MSG_*, NODE_*, PRESOLVE_*, PRICE_*/PRICER_*, SCALE_*,
-    SIMPLEX_*, solver status codes such as OPTIMAL, INFEASIBLE, UNBOUNDED,
-    and verbosity levels such as NEUTRAL, CRITICAL, DETAILED, FULL).
diff --git a/bathy_smoother/docs/bathy_smoother.LP_tools-pysrc.html b/bathy_smoother/docs/bathy_smoother.LP_tools-pysrc.html
deleted file mode 100644
index 4d51485..0000000
--- a/bathy_smoother/docs/bathy_smoother.LP_tools-pysrc.html
+++ /dev/null
@@ -1,243 +0,0 @@
-Source Code for Module bathy_smoother.LP_tools
-  1  import numpy as np 
-  2  from numpy.random import random 
-  3  from time import localtime 
-  4  import os 
-  5  try: 
-  6      from lpsolve55 import * 
-  7  except: 
-  8      print 'lpsolve55.so not found.' 
-  9      print 'Linear programming method will not be available.' 
- 10   
- 11   
- 12   
-
13 -def WriteLinearProgram(FileName, iList, jList, sList, Constant, ObjectiveFct): -
14 - 15 # Do not remove the test of feasibility ! - 16 # It is a preliminary check and it is actually useful. - 17 - 18 nbVar = ObjectiveFct.shape[0] - 19 nbConst = Constant.shape[0] - 20 print 'Write linear program' - 21 print 'nbvar = ', nbVar, ' nbConst = ', nbConst - 22 print ' ' - 23 - 24 f = open(FileName,'w') - 25 - 26 # write the function to minimise - 27 # we want to minimize the perturbation P = sum_e(|dh(e)| - 28 # as the absolute value is not a linear function, we use a trick: - 29 # we introduce an additional variable ad(e) satisfying +/- dh(e) <= ad(e) - 30 # and we minimize sum_e(ad(e)) - 31 f.write('min: ') - 32 for iVar in range(nbVar): - 33 eVal = ObjectiveFct[iVar,0] - 34 if (eVal != 0): - 35 if (eVal > 0): - 36 add='+' - 37 else: - 38 add='' - 39 - 40 string = '%s%f X%d ' %(add,eVal,iVar+1) - 41 f.write(string) - 42 else: - 43 string = '+0 X%d ' %(iVar+1) - 44 f.write(string) - 45 - 46 f.write(';\n') - 47 f.write('\n') - 48 - 49 #write the inequality constraintes - 50 tolCrit = 1e-6 - 51 for iConst in range(nbConst): - 52 H = np.where(iList == iConst+1)[0] - 53 nbH = H.shape[0] - 54 if (nbH == 0): - 55 if (Constant[iConst,0] < -tolCrit): - 56 testfeasibility = 0 - 57 return testfeasibility - 58 - 59 else: - 60 string = 'row%s: ' %str(iConst+1) - 61 f.write(string) - 62 for iH in range(nbH): - 63 jL = jList[H[iH],0] - 64 sL = sList[H[iH],0] - 65 string = '%.2f X%d ' %(sL, jL) - 66 if (sL > 0): - 67 add='+' - 68 else: - 69 add='' - 70 - 71 string = '%s%s' %(add, string) - 72 f.write(string) - 73 - 74 string = '<= %.8e ;\n' %Constant[iConst,0] - 75 f.write(string) - 76 - 77 f.write('\n') - 78 - 79 # the free command does not seem to work as advertised - 80 f.write('free') - 81 for iVar in range(nbVar): - 82 if (iVar+1 > 1): - 83 f.write(',') - 84 - 85 string = ' X%d' %(iVar+1) - 86 f.write(string) - 87 - 88 f.write(';\n') - 89 f.close() - 90 testfeasibility = 1 - 91 - 92 - 93 return testfeasibility -
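The comments above describe the standard trick for minimising sum_e |dh(e)|: introduce one auxiliary variable ad(e) per wet cell with +/-dh(e) <= ad(e) and minimise sum_e ad(e); those extra rows are what GetIJS_rx0 appends at the end of its listing. If lpsolve55 is not available, the same triplet system can in principle be handed to scipy.optimize.linprog. A hedged sketch, not part of the package (depending on the SciPy version, A_ub may need to be a dense array):

    import numpy as np
    from scipy.optimize import linprog
    from scipy.sparse import coo_matrix

    def solve_with_linprog(iList, jList, sList, Constant, ObjectiveFct):
        nbVar = ObjectiveFct.shape[0]
        A = coo_matrix((sList[:, 0],
                        (iList[:, 0].astype(int) - 1, jList[:, 0].astype(int) - 1)),
                       shape=(Constant.shape[0], nbVar))
        # bounds=(None, None): all variables free, like the 'free' declaration above
        res = linprog(c=ObjectiveFct[:, 0], A_ub=A.tocsr(), b_ub=Constant[:, 0],
                      bounds=(None, None), method='highs')
        testfeasibility = 1 if res.success else 0
        return res.fun, res.x, testfeasibility

This mirrors, approximately, the (ValueFct, ValueVar, testfeasibility) return convention of SolveLinearProgram below.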
94 - 95 - 96 -
97 -def SolveLinearProgram(iList, jList, sList, Constant, ObjectiveFct): -
98 - 99 nbVar = ObjectiveFct.shape[0] -100 nbConstraint = Constant.shape[0] -101 print 'Solving a linear program of ', nbVar, ' variables and ', nbConstraint, ' Constraints' -102 -103 while(1): -104 H = localtime() -105 V0 = H[3] -106 V1 = H[4] -107 V2 = np.floor(H[5]) -108 V3 = np.ceil(10 * random()) -109 Prefix = '/tmp/lp_%s_%s_%s_%s' %(str(V0),str(V1),str(V2),str(V3)) -110 -111 FileInput = '%s_input.lp' %Prefix -112 FileOutput = '%s_output.lp' %Prefix -113 if (os.path.exists(FileInput) is False and os.path.exists(FileOutput) is False): -114 break -115 -116 print 'We failed with FileInput = ', FileInput -117 -118 testfeasibility = WriteLinearProgram(FileInput, iList, jList, sList, Constant, ObjectiveFct) -119 if (testfeasibility == 0): -120 raise ValueError, 'Feasibility test failed. testfeasibility = 0.' -121 -122 print 'Linear program written in FileOutput=', FileOutput -123 -124 lp_handle = lpsolve('read_lp_file', FileInput) -125 result = lpsolve('solve', lp_handle) -126 obj, ValueVar, ValueFct, testfeasibility = lpsolve('get_solution', result) -127 lpsolve('delete_lp', result) -128 -129 print 'Linear program solved' -130 -131 -132 return ValueFct, ValueVar, testfeasibility -
diff --git a/bathy_smoother/docs/bathy_smoother.bathy_smoothing-module.html b/bathy_smoother/docs/bathy_smoother.bathy_smoothing-module.html
deleted file mode 100644
index c2916d3..0000000
--- a/bathy_smoother/docs/bathy_smoother.bathy_smoothing-module.html
+++ /dev/null
@@ -1,488 +0,0 @@
-Module bathy_smoothing -- epydoc summary page listing the module's functions
-(the full usage docstrings appear again in the source listing below):
-    smoothing_Positive_rx0(MSK, Hobs, rx0max)
-        direct iterative method from Martinho and Batteen (2006); the
-        bathymetry is optimized for a given rx0 factor by increasing it.
-    smoothing_Negative_rx0(MSK, Hobs, rx0max)
-        opposite of the direct iterative method; optimizes the bathymetry
-        for a given rx0 factor by decreasing it.
-    smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix)
-        as smoothing_Positive_rx0, then all depths are multiplied by
-        K = Vol_init/Vol_final to ensure volume conservation.
-    smoothing_NegativeVolume_rx0(MSK, Hobs, rx0maxi, AreaMatrix)
-        as smoothing_Negative_rx0, with the same volume-conservation scaling.
-    smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix)
-        Mellor-Ezer-Oey method (Mellor et al., 1994); a sequence of
-        increase/decrease steps at adjacent cells.
-    smoothing_Laplacian_rx0(MSK, Hobs, rx0max)
-        iterated Laplacian filtering until the target rx0 is met.
diff --git a/bathy_smoother/docs/bathy_smoother.bathy_smoothing-pysrc.html b/bathy_smoother/docs/bathy_smoother.bathy_smoothing-pysrc.html
deleted file mode 100644
index 418e6a1..0000000
--- a/bathy_smoother/docs/bathy_smoother.bathy_smoothing-pysrc.html
+++ /dev/null
@@ -1,519 +0,0 @@
-Source Code for Module bathy_smoother.bathy_smoothing
-  1  import numpy as np 
-  2  from ROMS_bathy_smoother import bathy_tools 
-  3   
-  4   
-
5 -def smoothing_Positive_rx0(MSK, Hobs, rx0max): -
6 """ - 7 This program use the direct iterative method from Martinho and Batteen (2006) - 8 The bathymetry is optimized for a given rx0 factor by increasing it. - 9 - 10 Usage: - 11 RetBathy = smoothing_Positive_rx0(MSK, Hobs, rx0max) - 12 - 13 ---MSK(eta_rho,xi_rho) is the mask of the grid - 14 1 for sea - 15 0 for land - 16 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid - 17 ---rx0max is the target rx0 roughness factor - 18 """ - 19 - 20 eta_rho, xi_rho = Hobs.shape - 21 - 22 ListNeigh = np.array([[1, 0], - 23 [0, 1], - 24 [-1, 0], - 25 [0, -1]]) - 26 - 27 RetBathy = Hobs.copy() - 28 - 29 nbModif = 0 - 30 tol = 0.000001 - 31 - 32 while(True): - 33 IsFinished = 1 - 34 for iEta in range(eta_rho): - 35 for iXi in range(xi_rho): - 36 if (MSK[iEta,iXi] == 1): - 37 for ineigh in range(4): - 38 iEtaN = iEta + ListNeigh[ineigh,0] - 39 iXiN = iXi + ListNeigh[ineigh,1] - 40 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ - 41 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - 42 LowerBound = RetBathy[iEtaN,iXiN] * (1-rx0max)/(1+rx0max) - 43 if ((RetBathy[iEta,iXi] - LowerBound) < -tol): - 44 IsFinished = 0 - 45 RetBathy[iEta,iXi] = LowerBound - 46 nbModif = nbModif + 1 - 47 - 48 if (IsFinished == 1): - 49 break - 50 - 51 print ' nbModif=', nbModif - 52 - 53 return RetBathy -
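A minimal usage sketch, assuming h and msk are 2-D arrays from a ROMS grid and that the modules are importable as bathy_smoother.bathy_smoothing and bathy_smoother.bathy_tools (the import paths are an assumption):

    from bathy_smoother import bathy_smoothing, bathy_tools

    rx0max = 0.2
    print('raw rx0:     ', bathy_tools.RoughnessMatrix(h, msk).max())
    h_smooth = bathy_smoothing.smoothing_Positive_rx0(msk, h, rx0max)
    print('smoothed rx0:', bathy_tools.RoughnessMatrix(h_smooth, msk).max())

Each pass deepens any wet cell whose depth falls below neighbour_depth*(1-rx0max)/(1+rx0max), so the smoothing only increases depths.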
54 - 55 - 56 -
57 -def smoothing_Negative_rx0(MSK, Hobs, rx0max): -
58 """ - 59 This program use an opposite methode to the direct iterative method from - 60 Martinho and Batteen (2006). This program optimizes the bathymetry for - 61 a given rx0 factor by decreasing it. - 62 - 63 Usage: - 64 RetBathy = smoothing_Negative_rx0(MSK, Hobs, rx0max) - 65 - 66 ---MSK(eta_rho,xi_rho) is the mask of the grid - 67 1 for sea - 68 0 for land - 69 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid - 70 ---rx0max is the target rx0 roughness factor - 71 """ - 72 - 73 eta_rho, xi_rho = Hobs.shape - 74 - 75 ListNeigh = np.array([[1, 0], - 76 [0, 1], - 77 [-1, 0], - 78 [0, -1]]) - 79 - 80 RetBathy = Hobs.copy() - 81 - 82 nbModif = 0 - 83 tol = 0.000001 - 84 - 85 while(True): - 86 IsFinished = 1 - 87 for iEta in range(eta_rho): - 88 for iXi in range(xi_rho): - 89 if (MSK[iEta, iXi] == 1): - 90 for ineigh in range(4): - 91 iEtaN = iEta + ListNeigh[ineigh,0] - 92 iXiN = iXi + ListNeigh[ineigh,1] - 93 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ - 94 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): - 95 UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) - 96 if (RetBathy[iEta,iXi] > (UpperBound + tol)): - 97 IsFinished = 0 - 98 RetBathy[iEta, iXi] = UpperBound - 99 nbModif = nbModif + 1 -100 -101 if (IsFinished == 1): -102 break -103 -104 print ' nbModif=', nbModif -105 -106 return RetBathy -
107 -108 -109 -
110 -def smoothing_PositiveVolume_rx0(MSK, Hobs, rx0max, AreaMatrix): -
111 """ -112 This program use the direct iterative method from Martinho and Batteen (2006) -113 The bathymetry is optimized for a given rx0 factor by increasing it. All depth -114 are then multiplied by the coeficient K = Vol_init/Vol_final in order to -115 insure volume conservation. -116 -117 Usage: -118 RetBathy = smoothing_Positive_rx0(MSK, Hobs, rx0max, AreaMatrix) -119 -120 ---MSK(eta_rho,xi_rho) is the mask of the grid -121 1 for sea -122 0 for land -123 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid -124 ---rx0max is the target rx0 roughness factor -125 ---AreaMatrix(eta_rho,xi_rho) is the matrix of areas at -126 rho point -127 """ -128 -129 eta_rho, xi_rho = Hobs.shape -130 -131 ListNeigh = np.array([[1, 0], -132 [0, 1], -133 [-1, 0], -134 [0, -1]]) -135 -136 WorkBathy = Hobs.copy() -137 -138 nbModif = 0 -139 tol = 0.000001 -140 -141 while(True): -142 IsFinished = 1 -143 for iEta in range(eta_rho): -144 for iXi in range(xi_rho): -145 if (MSK[iEta, iXi] == 1): -146 for ineigh in range(4): -147 iEtaN = iEta + ListNeigh[ineigh,0] -148 iXiN = iXi + ListNeigh[ineigh,1] -149 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -150 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -151 LowerBound = RetBathy[iEtaN, iXiN] * (1-rx0max)/(1+rx0max) -152 if ((WorkBathy[iEta,iXi] - LowerBound) < -tol): -153 IsFinished = 0 -154 WorkBathy[iEta, iXi] = LowerBound -155 nbModif = nbModif + 1 -156 -157 if (IsFinished == 1): -158 break -159 -160 print ' nbModif=', nbModif -161 -162 VolOrig=0 -163 VolWork=0 -164 for iEta in range(eta_rho): -165 for iXi in range(xi_rho): -166 if (MSK[iEta, iXi] == 1): -167 VolOrig = VolOrig + AreaMatrix[iEta,iXi] * Hobs[iEta,iXi] -168 VolWork = VolWork + AreaMatrix[iEta,iXi] * WorkBathy[iEta,iXi] -169 -170 RetBathy = WorkBathy * (VolOrig / VolWork) -171 -172 return RetBathy -
173 -174 -175 -
176 -def smoothing_NegativeVolume_rx0(MSK, Hobs, rx0maxi, AreaMatrix): -
177 """ -178 This program use an opposite methode to the direct iterative method from -179 Martinho and Batteen (2006). This program optimizes the bathymetry for -180 a given rx0 factor by decreasing it. All depth are then multiplied by -181 the coeficient K = Vol_init/Vol_final in order to insure volume conservation. -182 -183 Usage: -184 RetBathy = smoothing_Negative_rx0(MSK, Hobs, rx0max, AreaMatrix) -185 -186 ---MSK(eta_rho,xi_rho) is the mask of the grid -187 1 for sea -188 0 for land -189 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid -190 ---rx0max is the target rx0 roughness factor -191 ---AreaMatrix(eta_rho,xi_rho) is the matrix of areas at -192 rho point -193 """ -194 -195 eta_rho, xi_rho = Hobs.shape -196 -197 ListNeigh = np.array([[1, 0], -198 [0, 1], -199 [-1, 0], -200 [0, -1]]) -201 -202 WorkBathy = Hobs.copy() -203 -204 nbModif = 0 -205 tol = 0.000001 -206 -207 while(True): -208 IsFinished = 1 -209 for iEta in range(eta_rho): -210 for iXi in range(xi_rho): -211 if (MSK[iEta, iXi] == 1): -212 for ineigh in range(4): -213 iEtaN = iEta + ListNeigh[ineigh,0] -214 iXiN = iXi + ListNeigh[ineigh,1] -215 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -216 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -217 UpperBound = RetBathy[iEtaN, iXiN] * (1+rx0max)/(1-rx0max) -218 if (WorkBathy[iEta,iXi] > (UpperBound + tol)): -219 IsFinished = 0 -220 WorkBathy[iEta, iXi] = UpperBound -221 nbModif = nbModif + 1 -222 -223 if (IsFinished == 1): -224 break -225 -226 print ' nbModif=', nbModif -227 -228 VolOrig=0 -229 VolWork=0 -230 for iEta in range(eta_rho): -231 for iXi in range(xi_rho): -232 if (MSK[iEta, iXi] == 1): -233 VolOrig = VolOrig + AreaMatrix[iEta,iXi] * Hobs[iEta,iXi] -234 VolWork = VolWork + AreaMatrix[iEta,iXi] * WorkBathy[iEta,iXi] -235 -236 RetBathy = WorkBathy * (VolOrig / VolWork) -237 -238 return RetBathy -
239 -240 -241 -
242 -def smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix): -
243 """ -244 This program use the Mellor-Ezer-Oey method (Mellor et al., 1994). -245 The bathymetry is optimized for a given rx0 factor by doing a sequence -246 of increase/decrease at adjacent cells. -247 -248 Usage: -249 RetBathy, HmodifVal, ValueFct = smoothing_PlusMinus_rx0(MSK, Hobs, rx0max, AreaMatrix) -250 -251 ---MSK(eta_rho,xi_rho) is the mask of the grid -252 1 for sea -253 0 for land -254 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid -255 ---rx0max is the target rx0 roughness factor -256 ---AreaMatrix(eta_rho,xi_rho) is the matrix of areas at -257 rho-points. -258 """ -259 -260 eta_rho, xi_rho = Hobs.shape -261 -262 ListNeigh = np.array([[1, 0], -263 [0, 1], -264 [-1, 0], -265 [0, -1]]) -266 -267 RetBathy = Hobs.copy() -268 -269 HmodifVal = 0 -270 TheMultiplier = (1 - rx0max) / (1 + rx0max) -271 tol = 0.000001 -272 ValueFct = 0 -273 -274 while(True): -275 IsFinished = 1 -276 for iEta in range(eta_rho): -277 for iXi in range(xi_rho): -278 if (MSK[iEta, iXi] == 1): -279 Area = AreaMatrix[iEta, iXi] -280 for ineigh in range(4): -281 iEtaN = iEta + ListNeigh[ineigh,0] -282 iXiN = iXi + ListNeigh[ineigh,1] -283 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -284 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -285 AreaN = AreaMatrix[iEtaN,iXiN] -286 LowerBound = RetBathy[iEtaN,iXiN] * TheMultiplier -287 if ((RetBathy[iEta,iXi] - LowerBound) < -tol): -288 IsFinished = 0 -289 h = (TheMultiplier * RetBathy[iEtaN,iXiN] - RetBathy[iEta,iXi]) \ -290 / (AreaN + TheMultiplier * Area) -291 RetBathy[iEta,iXi] = RetBathy[iEta,iXi] + AreaN * h -292 RetBathy[iEtaN,iXiN] = RetBathy[iEtaN,iXiN] - Area * h -293 HmodifVal = HmodifVal + abs(h) -294 ValueFct = ValueFct + abs(h) * (Area + AreaN) -295 -296 if (IsFinished == 1): -297 break -298 -299 H = AreaMatrix * Hobs * MSK -300 TheBathymetry1 = H.sum() -301 H = AreaMatrix * RetBathy * MSK -302 TheBathymetry2 = H.sum() -303 DeltaBathymetry = TheBathymetry1 - TheBathymetry2 -304 print 'DeltaBathymetry = ', DeltaBathymetry -305 -306 return RetBathy, HmodifVal, ValueFct -
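The increase/decrease step above is area-weighted so that the water volume of each adjusted pair is unchanged, and it leaves the pair exactly on the rx0max bound. A small standalone numeric check using the same formulas as the code above:

    r = 0.2
    M = (1 - r) / (1 + r)          # TheMultiplier
    h1, h2 = 50.0, 200.0           # shallow cell and its deep neighbour
    A1, A2 = 1.0e6, 1.2e6          # cell areas

    h = (M * h2 - h1) / (A2 + M * A1)
    h1_new = h1 + A2 * h           # shallow cell is deepened
    h2_new = h2 - A1 * h           # deep neighbour is made shallower

    print(A1 * h1_new + A2 * h2_new - (A1 * h1 + A2 * h2))   # 0: volume conserved
    print(abs(h1_new - h2_new) / (h1_new + h2_new))          # == r, the rx0 bound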
307 -308 -
309 -def smoothing_Laplacian_rx0(MSK, Hobs, rx0max): -
310 """ -311 This program use Laplacian filter. -312 The bathymetry is optimized for a given rx0 factor by doing an iterated -313 sequence of Laplacian filterings. -314 -315 Usage: -316 RetBathy = smoothing_Laplacian_rx0(MSK, Hobs, rx0max) -317 -318 ---MSK(eta_rho,xi_rho) is the mask of the grid -319 1 for sea -320 0 for land -321 ---Hobs(eta_rho,xi_rho) is the raw depth of the grid -322 ---rx0max is the target rx0 roughness factor -323 """ -324 -325 eta_rho, xi_rho = Hobs.shape -326 -327 ListNeigh = np.array([[1, 0], -328 [0, 1], -329 [-1, 0], -330 [0, -1]]) -331 -332 RetBathy = Hobs.copy() -333 -334 tol = 0.00001 -335 WeightMatrix = np.zeros((eta_rho, xi_rho)) -336 for iEta in range(eta_rho): -337 for iXi in range(xi_rho): -338 WeightSum = 0 -339 for ineigh in range(4): -340 iEtaN = iEta + ListNeigh[ineigh,0] -341 iXiN = iXi + ListNeigh[ineigh,1] -342 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -343 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -344 WeightSum = WeightSum + 1 -345 -346 WeightMatrix[iEta,iXi] = WeightSum -347 -348 Iter = 1 -349 NumberDones = np.zeros((eta_rho, xi_rho)) -350 while(True): -351 RoughMat = bathy_tools.RoughnessMatrix(RetBathy, MSK) -352 Kbefore = np.where(RoughMat > rx0max) -353 nbPtBefore = np.size(Kbefore, 1) -354 realR = RoughMat.max() -355 TheCorrect = np.zeros((eta_rho,xi_rho)) -356 IsFinished = 1 -357 nbPointMod = 0 -358 AdditionalDone = np.zeros((eta_rho, xi_rho)) -359 for iEta in range(eta_rho): -360 for iXi in range(xi_rho): -361 Weight = 0 -362 WeightSum = 0 -363 for ineigh in range(4): -364 iEtaN = iEta + ListNeigh[ineigh,0] -365 iXiN = iXi + ListNeigh[ineigh,1] -366 if (iEtaN <= eta_rho-1 and iEtaN >= 0 and iXiN <= xi_rho-1 \ -367 and iXiN >= 0 and MSK[iEtaN,iXiN] == 1): -368 Weight = Weight + RetBathy[iEtaN,iXiN] -369 AdditionalDone[iEtaN,iXiN] = AdditionalDone[iEtaN,iXiN] + NumberDones[iEta,iXi] -370 -371 TheWeight = WeightMatrix[iEta,iXi] -372 WeDo = 0 -373 if TheWeight > tol: -374 if RoughMat[iEta,iXi] > rx0max: -375 WeDo = 1 -376 if NumberDones[iEta,iXi] > 0: -377 WeDo = 1 -378 -379 if WeDo == 1: -380 IsFinished = 0 -381 TheDelta = (Weight - TheWeight * RetBathy[iEta,iXi]) / (2 * TheWeight) -382 TheCorrect[iEta,iXi] = TheCorrect[iEta,iXi] + TheDelta -383 nbPointMod = nbPointMod + 1 -384 NumberDones[iEta,iXi] = 1 -385 -386 NumberDones = NumberDones + AdditionalDone -387 RetBathy = RetBathy + TheCorrect -388 NewRoughMat = bathy_tools.RoughnessMatrix(RetBathy, MSK) -389 Kafter = np.where(NewRoughMat > rx0max) -390 nbPtAfter = np.size(Kafter, 1) -391 TheProd = (RoughMat > rx0max) * (NewRoughMat > rx0max) -392 nbPtInt = TheProd.sum() -393 if (nbPtInt == nbPtAfter and nbPtBefore == nbPtAfter): -394 eStr=' no erase' -395 else: -396 eStr=''; -397 NumberDones = np.zeros((eta_rho, xi_rho)) -398 -399 print 'Iteration #', Iter -400 print 'current r=', realR, ' nbPointMod=', nbPointMod, eStr -401 print ' ' -402 -403 Iter = Iter + 1 -404 -405 if (IsFinished == 1): -406 break -407 -408 return RetBathy -
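For each wet point, the correction applied per iteration is half of the distance between the current depth and the mean depth of its wet neighbours, which is what TheDelta = (Weight - TheWeight*RetBathy)/(2*TheWeight) computes above. In isolation:

    # hN: depths of the wet neighbours of one point, hc: its current depth
    hN = [100.0, 120.0, 90.0]
    hc = 60.0
    delta = (sum(hN) - len(hN) * hc) / (2.0 * len(hN))   # == (mean(hN) - hc) / 2
    hc_new = hc + delta                                   # 60.0 -> 81.666...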
diff --git a/bathy_smoother/docs/bathy_smoother.bathy_tools-module.html b/bathy_smoother/docs/bathy_smoother.bathy_tools-module.html
deleted file mode 100644
index d1b1a2e..0000000
--- a/bathy_smoother/docs/bathy_smoother.bathy_tools-module.html
+++ /dev/null
@@ -1,181 +0,0 @@
-Module bathy_tools -- epydoc summary page listing the module's single function
-(the full docstring appears again in the source listing below):
-    RoughnessMatrix(DEP, MSK)
-        RoughMat = GRID_RoughnessMatrix(DEP, MSK)
-        ---DEP is the bathymetry of the grid
-        ---MSK is the mask of the grid
diff --git a/bathy_smoother/docs/bathy_smoother.bathy_tools-pysrc.html b/bathy_smoother/docs/bathy_smoother.bathy_tools-pysrc.html
deleted file mode 100644
index 11fb3cd..0000000
--- a/bathy_smoother/docs/bathy_smoother.bathy_tools-pysrc.html
+++ /dev/null
@@ -1,148 +0,0 @@
-Source Code for Module bathy_smoother.bathy_tools
- 1  import numpy as np 
- 2   
- 3   
-
-def RoughnessMatrix(DEP, MSK):
-    """
-    RoughMat = GRID_RoughnessMatrix(DEP, MSK)
-
-    ---DEP is the bathymetry of the grid
-    ---MSK is the mask of the grid
-    """
-
-    eta_rho, xi_rho = DEP.shape
-
-    Umat = np.array([[0, 1],
-                     [1, 0],
-                     [0, -1],
-                     [-1, 0]])
-
-    RoughMat = np.zeros(DEP.shape)
-
-    for iEta in range(1,eta_rho-1):
-        for iXi in range(1,xi_rho-1):
-            if (MSK[iEta,iXi] == 1):
-                rough = 0
-                for i in range(4):
-                    iEtaB = iEta + Umat[i,0]
-                    iXiB = iXi + Umat[i,1]
-                    if (MSK[iEtaB,iXiB] == 1):
-                        dep1 = DEP[iEta,iXi]
-                        dep2 = DEP[iEtaB,iXiB]
-                        delta = abs((dep1 - dep2) / (dep1 + dep2))
-                        rough = np.maximum(rough, delta)
-
-                RoughMat[iEta,iXi] = rough
-
-    return RoughMat
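The double loop above is the reference implementation of the roughness measure rx0 = |h1 - h2| / (h1 + h2) over the four neighbours of every interior wet point. On large grids a vectorized version can be much faster; an untested sketch of one possible equivalent (not part of the package):

    import numpy as np

    def roughness_matrix_vec(DEP, MSK):
        r = np.zeros(DEP.shape)
        for dEta, dXi in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            nb_dep = np.roll(np.roll(DEP, -dEta, axis=0), -dXi, axis=1)
            nb_msk = np.roll(np.roll(MSK, -dEta, axis=0), -dXi, axis=1)
            both_wet = (MSK == 1) & (nb_msk == 1)
            delta = np.zeros(DEP.shape)
            np.divide(np.abs(DEP - nb_dep), DEP + nb_dep, out=delta, where=both_wet)
            r = np.maximum(r, delta)
        r[0, :] = r[-1, :] = 0.0      # the loop version only fills interior points
        r[:, 0] = r[:, -1] = 0.0
        return r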
diff --git a/bathy_smoother/docs/crarr.png b/bathy_smoother/docs/crarr.png
deleted file mode 100644
index 26b43c5..0000000
Binary files a/bathy_smoother/docs/crarr.png and /dev/null differ
diff --git a/bathy_smoother/docs/epydoc.css b/bathy_smoother/docs/epydoc.css
deleted file mode 100644
index 86d4170..0000000
--- a/bathy_smoother/docs/epydoc.css
+++ /dev/null
@@ -1,322 +0,0 @@
-/* Epydoc CSS Stylesheet: presentation styling for the generated HTML docs. */
diff --git a/bathy_smoother/docs/epydoc.js b/bathy_smoother/docs/epydoc.js
deleted file mode 100644
index e787dbc..0000000
--- a/bathy_smoother/docs/epydoc.js
+++ /dev/null
@@ -1,293 +0,0 @@
-// Epydoc JavaScript helpers (show/hide private members, source toggling) for the generated HTML docs.
+ - ""; - } - return false; -} - -function get_anchor() { - var href = location.href; - var start = href.indexOf("#")+1; - if ((start != 0) && (start != href.length)) - return href.substring(start, href.length); - } -function redirect_url(dottedName) { - // Scan through each element of the "pages" list, and check - // if "name" matches with any of them. - for (var i=0; i-m" or "-c"; - // extract the portion & compare it to dottedName. - var pagename = pages[i].substring(0, pages[i].length-2); - if (pagename == dottedName.substring(0,pagename.length)) { - - // We've found a page that matches `dottedName`; - // construct its URL, using leftover `dottedName` - // content to form an anchor. - var pagetype = pages[i].charAt(pages[i].length-1); - var url = pagename + ((pagetype=="m")?"-module.html": - "-class.html"); - if (dottedName.length > pagename.length) - url += "#" + dottedName.substring(pagename.length+1, - dottedName.length); - return url; - } - } - } diff --git a/bathy_smoother/docs/frames.html b/bathy_smoother/docs/frames.html deleted file mode 100644 index e2d082b..0000000 --- a/bathy_smoother/docs/frames.html +++ /dev/null @@ -1,17 +0,0 @@ - - - - - API Documentation - - - - - - - - - diff --git a/bathy_smoother/docs/help.html b/bathy_smoother/docs/help.html deleted file mode 100644 index 27fd225..0000000 --- a/bathy_smoother/docs/help.html +++ /dev/null @@ -1,268 +0,0 @@ - - - - - Help - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  - - - - -
-
- -

API Documentation

- -

This document contains the API (Application Programming Interface) documentation for this project. Documentation for the Python objects defined by the project is divided into separate pages for each package, module, and class. The API documentation also includes two pages containing information about the project as a whole: a trees page, and an index page.

- -

Object Documentation

- -

Each Package Documentation page contains:

  • A description of the package.
  • A list of the modules and sub-packages contained by the package.
  • A summary of the classes defined by the package.
  • A summary of the functions defined by the package.
  • A summary of the variables defined by the package.
  • A detailed description of each function defined by the package.
  • A detailed description of each variable defined by the package.
- -

Each Module Documentation page contains:

  • A description of the module.
  • A summary of the classes defined by the module.
  • A summary of the functions defined by the module.
  • A summary of the variables defined by the module.
  • A detailed description of each function defined by the module.
  • A detailed description of each variable defined by the module.
- -

Each Class Documentation page contains:

  • A class inheritance diagram.
  • A list of known subclasses.
  • A description of the class.
  • A summary of the methods defined by the class.
  • A summary of the instance variables defined by the class.
  • A summary of the class (static) variables defined by the class.
  • A detailed description of each method defined by the class.
  • A detailed description of each instance variable defined by the class.
  • A detailed description of each class (static) variable defined by the class.
- -

Project Documentation

- -

The Trees page contains the module and class hierarchies:

  • The module hierarchy lists every package and module, with modules grouped into packages. At the top level, and within each package, modules and sub-packages are listed alphabetically.
  • The class hierarchy lists every class, grouped by base class. If a class has more than one base class, then it will be listed under each base class. At the top level, and under each base class, classes are listed alphabetically.
- -

The Index page contains indices of terms and identifiers:

  • The term index lists every term indexed by any object's documentation. For each term, the index provides links to each place where the term is indexed.
  • The identifier index lists the (short) name of every package, module, class, method, function, variable, and parameter. For each identifier, the index provides a short description, and a link to its documentation.
- -

The Table of Contents

- -

The table of contents occupies the two frames on the left side of the window. The upper-left frame displays the project contents, and the lower-left frame displays the module contents:

[Frame layout diagram: the Project Contents and Module Contents frames appear alongside the API Documentation frame.]

- -

The project contents frame contains a list of all packages and modules that are defined by the project. Clicking on an entry will display its contents in the module contents frame. Clicking on a special entry, labeled "Everything," will display the contents of the entire project.

- -

The module contents frame contains a list of every submodule, class, type, exception, function, and variable defined by a module or package. Clicking on an entry will display its documentation in the API documentation frame. Clicking on the name of the module, at the top of the frame, will display the documentation for the module itself.

- -

The "frames" and "no frames" buttons below the top -navigation bar can be used to control whether the table of contents is -displayed or not.

- -

The Navigation Bar

- -

A navigation bar is located at the top and bottom of every page. It indicates what type of page you are currently viewing, and allows you to go to related pages. The following table describes the labels on the navigation bar. Note that some labels (such as [Parent]) are not displayed on all pages.

Label     | Highlighted when...    | Links to...
[Parent]  | (never highlighted)    | the parent of the current package
[Package] | viewing a package      | the package containing the current object
[Module]  | viewing a module       | the module containing the current object
[Class]   | viewing a class        | the class containing the current object
[Trees]   | viewing the trees page | the trees page
[Index]   | viewing the index page | the index page
[Help]    | viewing the help page  | the help page
- -

The "show private" and "hide private" buttons below -the top navigation bar can be used to control whether documentation -for private objects is displayed. Private objects are usually defined -as objects whose (short) names begin with a single underscore, but do -not end with an underscore. For example, "_x", -"__pprint", and "epydoc.epytext._tokenize" -are private objects; but "re.sub", -"__init__", and "type_" are not. However, -if a module defines the "__all__" variable, then its -contents are used to decide which objects are private.

- -

A timestamp below the bottom navigation bar indicates when each page was last updated.

- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/bathy_smoother/docs/identifier-index.html b/bathy_smoother/docs/identifier-index.html deleted file mode 100644 index cf1afac..0000000 --- a/bathy_smoother/docs/identifier-index.html +++ /dev/null @@ -1,666 +0,0 @@ - - - - - Identifier Index - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  - - - - -
Identifier Index

- - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/bathy_smoother/docs/index.html b/bathy_smoother/docs/index.html deleted file mode 100644 index e2d082b..0000000 --- a/bathy_smoother/docs/index.html +++ /dev/null @@ -1,17 +0,0 @@ - - - - - API Documentation - - - - - - - - - diff --git a/bathy_smoother/docs/module-tree.html b/bathy_smoother/docs/module-tree.html deleted file mode 100644 index f749c2d..0000000 --- a/bathy_smoother/docs/module-tree.html +++ /dev/null @@ -1,110 +0,0 @@ - - - - - Module Hierarchy - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  - - - - -
Module Hierarchy
- - - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/bathy_smoother/docs/redirect.html b/bathy_smoother/docs/redirect.html deleted file mode 100644 index e39aca5..0000000 --- a/bathy_smoother/docs/redirect.html +++ /dev/null @@ -1,38 +0,0 @@ -Epydoc Redirect Page - - - - - - - - -

Epydoc Auto-redirect page

- -

When javascript is enabled, this page will redirect URLs of the form redirect.html#dotted.name to the documentation for the object with the given fully-qualified dotted name.

-

 

- - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother-module.html b/bathy_smoother/docs/toc-bathy_smoother-module.html deleted file mode 100644 index c3977c6..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother-module.html +++ /dev/null @@ -1,29 +0,0 @@ - - - - - bathy_smoother - - - - - -

Module bathy_smoother

-
-
-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_smoothing-module.html b/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_smoothing-module.html deleted file mode 100644 index f585bf5..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_smoothing-module.html +++ /dev/null @@ -1,32 +0,0 @@ - - - - - LP_bathy_smoothing - - - - - -

Module LP_bathy_smoothing

-
-

Functions

- LP_smoothing_rx0
LP_smoothing_rx0_heuristic

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_tools-module.html b/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_tools-module.html deleted file mode 100644 index 1762839..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother.LP_bathy_tools-module.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - - LP_bathy_tools - - - - - -

Module LP_bathy_tools

-
-

Functions

- ConnectedComponent
GetBadPoints
GetIJS_maxamp
GetIJS_rx0
GetIJS_signs
MergeIJS_listings
Neighborhood

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother.LP_tools-module.html b/bathy_smoother/docs/toc-bathy_smoother.LP_tools-module.html deleted file mode 100644 index 558e180..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother.LP_tools-module.html +++ /dev/null @@ -1,160 +0,0 @@ - - - - - LP_tools - - - - - -

Module LP_tools

-
-

Functions

- SolveLinearProgram
WriteLinearProgram

Variables

- ANTIDEGEN_BOUNDFLIP
ANTIDEGEN_COLUMNCHECK
ANTIDEGEN_DURINGBB
ANTIDEGEN_DYNAMIC
ANTIDEGEN_FIXEDVARS
ANTIDEGEN_INFEASIBLE
ANTIDEGEN_LOSTFEAS
ANTIDEGEN_NONE
ANTIDEGEN_NUMFAILURE
ANTIDEGEN_RHSPERTURB
ANTIDEGEN_STALLING
BRANCH_AUTOMATIC
BRANCH_CEILING
BRANCH_FLOOR
CRASH_LEASTDEGENERATE
CRASH_MOSTFEASIBLE
CRASH_NONE
CRITICAL
DEGENERATE
DETAILED
EQ
FEASFOUND
FR
FULL
GE
IMPORTANT
IMPROVE_BBSIMPLEX
IMPROVE_DUALFEAS
IMPROVE_NONE
IMPROVE_SOLUTION
IMPROVE_THETAGAP
INFEASIBLE
Infinite
LE
MSG_LPFEASIBLE
MSG_LPOPTIMAL
MSG_MILPBETTER
MSG_MILPEQUAL
MSG_MILPFEASIBLE
MSG_PRESOLVE
NEUTRAL
NODE_AUTOORDER
NODE_BRANCHREVERSEMODE
NODE_BREADTHFIRSTMODE
NODE_DEPTHFIRSTMODE
NODE_DYNAMICMODE
NODE_FIRSTSELECT
NODE_FRACTIONSELECT
NODE_GAPSELECT
NODE_GREEDYMODE
NODE_GUBMODE
NODE_PSEUDOCOSTMODE
NODE_PSEUDOCOSTSELECT
NODE_PSEUDONONINTSELECT
NODE_PSEUDORATIOSELECT
NODE_RANDOMIZEMODE
NODE_RANGESELECT
NODE_RCOSTFIXING
NODE_RESTARTMODE
NODE_STRONGINIT
NODE_USERSELECT
NODE_WEIGHTREVERSEMODE
NOFEASFOUND
NOMEMORY
NORMAL
NUMFAILURE
OPTIMAL
PRESOLVE_BOUNDS
PRESOLVE_COLDOMINATE
PRESOLVE_COLFIXDUAL
PRESOLVE_COLS
PRESOLVE_DUALS
PRESOLVE_ELIMEQ2
PRESOLVE_IMPLIEDFREE
PRESOLVE_IMPLIEDSLK
PRESOLVE_KNAPSACK
PRESOLVE_LINDEP
PRESOLVE_MERGEROWS
PRESOLVE_NONE
PRESOLVE_PROBEFIX
PRESOLVE_PROBEREDUCE
PRESOLVE_REDUCEGCD
PRESOLVE_REDUCEMIP
PRESOLVE_ROWDOMINATE
PRESOLVE_ROWS
PRESOLVE_SENSDUALS
PRESOLVE_SOS
PRICER_DANTZIG
PRICER_DEVEX
PRICER_FIRSTINDEX
PRICER_STEEPESTEDGE
PRICE_ADAPTIVE
PRICE_AUTOPARTIAL
PRICE_HARRISTWOPASS
PRICE_LOOPALTERNATE
PRICE_LOOPLEFT
PRICE_MULTIPLE
PRICE_PARTIAL
PRICE_PRIMALFALLBACK
PRICE_RANDOMIZE
PRICE_TRUENORMINIT
PROCBREAK
PROCFAIL
SCALE_COLSONLY
SCALE_CURTISREID
SCALE_DYNUPDATE
SCALE_EQUILIBRATE
SCALE_EXTREME
SCALE_GEOMETRIC
SCALE_INTEGERS
SCALE_LOGARITHMIC
SCALE_MEAN
SCALE_NONE
SCALE_POWER2
SCALE_QUADRATIC
SCALE_RANGE
SCALE_ROWSONLY
SCALE_USERWEIGHT
SEVERE
SIMPLEX_DUAL_DUAL
SIMPLEX_DUAL_PRIMAL
SIMPLEX_PRIMAL_DUAL
SIMPLEX_PRIMAL_PRIMAL
SUBOPTIMAL
TIMEOUT
UNBOUNDED
USERABORT

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother.bathy_smoothing-module.html b/bathy_smoother/docs/toc-bathy_smoother.bathy_smoothing-module.html deleted file mode 100644 index 5f38659..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother.bathy_smoothing-module.html +++ /dev/null @@ -1,36 +0,0 @@ - - - - - bathy_smoothing - - - - - -

Module bathy_smoothing

-
-

Functions

- smoothing_Laplacian_rx0
smoothing_NegativeVolume_rx0
smoothing_Negative_rx0
smoothing_PlusMinus_rx0
smoothing_PositiveVolume_rx0
smoothing_Positive_rx0

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-bathy_smoother.bathy_tools-module.html b/bathy_smoother/docs/toc-bathy_smoother.bathy_tools-module.html deleted file mode 100644 index e64a5b1..0000000 --- a/bathy_smoother/docs/toc-bathy_smoother.bathy_tools-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - bathy_tools - - - - - -

Module bathy_tools

-
-

Functions

- RoughnessMatrix

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc-everything.html b/bathy_smoother/docs/toc-everything.html deleted file mode 100644 index 5f9ba1f..0000000 --- a/bathy_smoother/docs/toc-everything.html +++ /dev/null @@ -1,176 +0,0 @@ - - - - - Everything - - - - - -

Everything

-
-

All Functions

- bathy_smoother.LP_bathy_smoothing.LP_smoothing_rx0
bathy_smoother.LP_bathy_smoothing.LP_smoothing_rx0_heuristic
bathy_smoother.LP_bathy_tools.ConnectedComponent
bathy_smoother.LP_bathy_tools.GetBadPoints
bathy_smoother.LP_bathy_tools.GetIJS_maxamp
bathy_smoother.LP_bathy_tools.GetIJS_rx0
bathy_smoother.LP_bathy_tools.GetIJS_signs
bathy_smoother.LP_bathy_tools.MergeIJS_listings
bathy_smoother.LP_bathy_tools.Neighborhood
bathy_smoother.LP_tools.SolveLinearProgram
bathy_smoother.LP_tools.WriteLinearProgram
bathy_smoother.bathy_smoothing.smoothing_Laplacian_rx0
bathy_smoother.bathy_smoothing.smoothing_NegativeVolume_rx0
bathy_smoother.bathy_smoothing.smoothing_Negative_rx0
bathy_smoother.bathy_smoothing.smoothing_PlusMinus_rx0
bathy_smoother.bathy_smoothing.smoothing_PositiveVolume_rx0
bathy_smoother.bathy_smoothing.smoothing_Positive_rx0
bathy_smoother.bathy_tools.RoughnessMatrix

All Variables

- bathy_smoother.LP_tools.ANTIDEGEN_BOUNDFLIP
bathy_smoother.LP_tools.ANTIDEGEN_COLUMNCHECK
bathy_smoother.LP_tools.ANTIDEGEN_DURINGBB
bathy_smoother.LP_tools.ANTIDEGEN_DYNAMIC
bathy_smoother.LP_tools.ANTIDEGEN_FIXEDVARS
bathy_smoother.LP_tools.ANTIDEGEN_INFEASIBLE
bathy_smoother.LP_tools.ANTIDEGEN_LOSTFEAS
bathy_smoother.LP_tools.ANTIDEGEN_NONE
bathy_smoother.LP_tools.ANTIDEGEN_NUMFAILURE
bathy_smoother.LP_tools.ANTIDEGEN_RHSPERTURB
bathy_smoother.LP_tools.ANTIDEGEN_STALLING
bathy_smoother.LP_tools.BRANCH_AUTOMATIC
bathy_smoother.LP_tools.BRANCH_CEILING
bathy_smoother.LP_tools.BRANCH_FLOOR
bathy_smoother.LP_tools.CRASH_LEASTDEGENERATE
bathy_smoother.LP_tools.CRASH_MOSTFEASIBLE
bathy_smoother.LP_tools.CRASH_NONE
bathy_smoother.LP_tools.CRITICAL
bathy_smoother.LP_tools.DEGENERATE
bathy_smoother.LP_tools.DETAILED
bathy_smoother.LP_tools.EQ
bathy_smoother.LP_tools.FEASFOUND
bathy_smoother.LP_tools.FR
bathy_smoother.LP_tools.FULL
bathy_smoother.LP_tools.GE
bathy_smoother.LP_tools.IMPORTANT
bathy_smoother.LP_tools.IMPROVE_BBSIMPLEX
bathy_smoother.LP_tools.IMPROVE_DUALFEAS
bathy_smoother.LP_tools.IMPROVE_NONE
bathy_smoother.LP_tools.IMPROVE_SOLUTION
bathy_smoother.LP_tools.IMPROVE_THETAGAP
bathy_smoother.LP_tools.INFEASIBLE
bathy_smoother.LP_tools.Infinite
bathy_smoother.LP_tools.LE
bathy_smoother.LP_tools.MSG_LPFEASIBLE
bathy_smoother.LP_tools.MSG_LPOPTIMAL
bathy_smoother.LP_tools.MSG_MILPBETTER
bathy_smoother.LP_tools.MSG_MILPEQUAL
bathy_smoother.LP_tools.MSG_MILPFEASIBLE
bathy_smoother.LP_tools.MSG_PRESOLVE
bathy_smoother.LP_tools.NEUTRAL
bathy_smoother.LP_tools.NODE_AUTOORDER
bathy_smoother.LP_tools.NODE_BRANCHREVERSEMODE
bathy_smoother.LP_tools.NODE_BREADTHFIRSTMODE
bathy_smoother.LP_tools.NODE_DEPTHFIRSTMODE
bathy_smoother.LP_tools.NODE_DYNAMICMODE
bathy_smoother.LP_tools.NODE_FIRSTSELECT
bathy_smoother.LP_tools.NODE_FRACTIONSELECT
bathy_smoother.LP_tools.NODE_GAPSELECT
bathy_smoother.LP_tools.NODE_GREEDYMODE
bathy_smoother.LP_tools.NODE_GUBMODE
bathy_smoother.LP_tools.NODE_PSEUDOCOSTMODE
bathy_smoother.LP_tools.NODE_PSEUDOCOSTSELECT
bathy_smoother.LP_tools.NODE_PSEUDONONINTSELECT
bathy_smoother.LP_tools.NODE_PSEUDORATIOSELECT
bathy_smoother.LP_tools.NODE_RANDOMIZEMODE
bathy_smoother.LP_tools.NODE_RANGESELECT
bathy_smoother.LP_tools.NODE_RCOSTFIXING
bathy_smoother.LP_tools.NODE_RESTARTMODE
bathy_smoother.LP_tools.NODE_STRONGINIT
bathy_smoother.LP_tools.NODE_USERSELECT
bathy_smoother.LP_tools.NODE_WEIGHTREVERSEMODE
bathy_smoother.LP_tools.NOFEASFOUND
bathy_smoother.LP_tools.NOMEMORY
bathy_smoother.LP_tools.NORMAL
bathy_smoother.LP_tools.NUMFAILURE
bathy_smoother.LP_tools.OPTIMAL
bathy_smoother.LP_tools.PRESOLVE_BOUNDS
bathy_smoother.LP_tools.PRESOLVE_COLDOMINATE
bathy_smoother.LP_tools.PRESOLVE_COLFIXDUAL
bathy_smoother.LP_tools.PRESOLVE_COLS
bathy_smoother.LP_tools.PRESOLVE_DUALS
bathy_smoother.LP_tools.PRESOLVE_ELIMEQ2
bathy_smoother.LP_tools.PRESOLVE_IMPLIEDFREE
bathy_smoother.LP_tools.PRESOLVE_IMPLIEDSLK
bathy_smoother.LP_tools.PRESOLVE_KNAPSACK
bathy_smoother.LP_tools.PRESOLVE_LINDEP
bathy_smoother.LP_tools.PRESOLVE_MERGEROWS
bathy_smoother.LP_tools.PRESOLVE_NONE
bathy_smoother.LP_tools.PRESOLVE_PROBEFIX
bathy_smoother.LP_tools.PRESOLVE_PROBEREDUCE
bathy_smoother.LP_tools.PRESOLVE_REDUCEGCD
bathy_smoother.LP_tools.PRESOLVE_REDUCEMIP
bathy_smoother.LP_tools.PRESOLVE_ROWDOMINATE
bathy_smoother.LP_tools.PRESOLVE_ROWS
bathy_smoother.LP_tools.PRESOLVE_SENSDUALS
bathy_smoother.LP_tools.PRESOLVE_SOS
bathy_smoother.LP_tools.PRICER_DANTZIG
bathy_smoother.LP_tools.PRICER_DEVEX
bathy_smoother.LP_tools.PRICER_FIRSTINDEX
bathy_smoother.LP_tools.PRICER_STEEPESTEDGE
bathy_smoother.LP_tools.PRICE_ADAPTIVE
bathy_smoother.LP_tools.PRICE_AUTOPARTIAL
bathy_smoother.LP_tools.PRICE_HARRISTWOPASS
bathy_smoother.LP_tools.PRICE_LOOPALTERNATE
bathy_smoother.LP_tools.PRICE_LOOPLEFT
bathy_smoother.LP_tools.PRICE_MULTIPLE
bathy_smoother.LP_tools.PRICE_PARTIAL
bathy_smoother.LP_tools.PRICE_PRIMALFALLBACK
bathy_smoother.LP_tools.PRICE_RANDOMIZE
bathy_smoother.LP_tools.PRICE_TRUENORMINIT
bathy_smoother.LP_tools.PROCBREAK
bathy_smoother.LP_tools.PROCFAIL
bathy_smoother.LP_tools.SCALE_COLSONLY
bathy_smoother.LP_tools.SCALE_CURTISREID
bathy_smoother.LP_tools.SCALE_DYNUPDATE
bathy_smoother.LP_tools.SCALE_EQUILIBRATE
bathy_smoother.LP_tools.SCALE_EXTREME
bathy_smoother.LP_tools.SCALE_GEOMETRIC
bathy_smoother.LP_tools.SCALE_INTEGERS
bathy_smoother.LP_tools.SCALE_LOGARITHMIC
bathy_smoother.LP_tools.SCALE_MEAN
bathy_smoother.LP_tools.SCALE_NONE
bathy_smoother.LP_tools.SCALE_POWER2
bathy_smoother.LP_tools.SCALE_QUADRATIC
bathy_smoother.LP_tools.SCALE_RANGE
bathy_smoother.LP_tools.SCALE_ROWSONLY
bathy_smoother.LP_tools.SCALE_USERWEIGHT
bathy_smoother.LP_tools.SEVERE
bathy_smoother.LP_tools.SIMPLEX_DUAL_DUAL
bathy_smoother.LP_tools.SIMPLEX_DUAL_PRIMAL
bathy_smoother.LP_tools.SIMPLEX_PRIMAL_DUAL
bathy_smoother.LP_tools.SIMPLEX_PRIMAL_PRIMAL
bathy_smoother.LP_tools.SUBOPTIMAL
bathy_smoother.LP_tools.TIMEOUT
bathy_smoother.LP_tools.UNBOUNDED
bathy_smoother.LP_tools.USERABORT

-[hide private] - - - - diff --git a/bathy_smoother/docs/toc.html b/bathy_smoother/docs/toc.html deleted file mode 100644 index f10d6dd..0000000 --- a/bathy_smoother/docs/toc.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - Table of Contents - - - - - -

Table of Contents

-
- Everything -
-

Modules

- bathy_smoother
bathy_smoother.LP_bathy_smoothing
bathy_smoother.LP_bathy_tools
bathy_smoother.LP_tools
bathy_smoother.bathy_smoothing
bathy_smoother.bathy_tools

- [hide private] - - - - diff --git a/bathy_smoother/external/lp_solve_5.5/extra/Python/lp_maker.pyc b/bathy_smoother/external/lp_solve_5.5/extra/Python/lp_maker.pyc deleted file mode 100644 index 720f8c8..0000000 Binary files a/bathy_smoother/external/lp_solve_5.5/extra/Python/lp_maker.pyc and /dev/null differ diff --git a/bathy_smoother/external/lp_solve_5.5/extra/Python/pyhelp.py b/bathy_smoother/external/lp_solve_5.5/extra/Python/pyhelp.py index 03e569d..de9ca0a 100644 --- a/bathy_smoother/external/lp_solve_5.5/extra/Python/pyhelp.py +++ b/bathy_smoother/external/lp_solve_5.5/extra/Python/pyhelp.py @@ -18,4 +18,4 @@ setupfile.close() output.close() - + diff --git a/bathy_smoother/setup.py b/bathy_smoother/setup.py index 497f913..f469f44 100644 --- a/bathy_smoother/setup.py +++ b/bathy_smoother/setup.py @@ -1,6 +1,6 @@ """ bathy_smoother is a suite of tools for working with ROMS bathymetry. -(ripped from matlab script LP_bathymetry) +(ripped from matlab script LP_bathymetry) Requires: NumPy (http://numpy.scipy.org) @@ -84,9 +84,7 @@ def configuration(parent_package='',top_path=None): version = '0.1', description = doclines[0], long_description = "\n".join(doclines[2:]), - author = "Frederic Castruccio", - author_email = "frederic@marine.rutgers.edu", - url = 'https://github.com/kshedstrom/pyroms', + url = 'https://github.com/ESMG/pyroms', license = 'BSD', platforms = ["any"], configuration=configuration, diff --git a/examples/Arctic2/make_bdry_file.py b/examples/Arctic2/make_bdry_file.py index 4722a37..502dfd8 100644 --- a/examples/Arctic2/make_bdry_file.py +++ b/examples/Arctic2/make_bdry_file.py @@ -68,9 +68,9 @@ def do_file(file, src_grd, dst_grd): lst = lst.split() lst_file = lst_file + lst -print 'Build OBC file from the following file list:' -print lst_file -print ' ' +print('Build OBC file from the following file list:') +print(lst_file) +print(' ') src_grd_file = data_dir + 'SODA_grid.cdf' src_grd = pyroms_toolbox.BGrid_SODA.get_nc_BGrid_SODA(src_grd_file, name='SODA_2.1.6_ARCTIC2', area='npolar', ystart=240) diff --git a/examples/Arctic2/make_clm_file.py b/examples/Arctic2/make_clm_file.py index 0d0efd6..714ce0f 100644 --- a/examples/Arctic2/make_clm_file.py +++ b/examples/Arctic2/make_clm_file.py @@ -73,9 +73,9 @@ def do_file(file, src_grd, dst_grd): lst = lst.split() lst_file = lst_file + lst -print 'Build CLM file from the following file list:' -print lst_file -print ' ' +print('Build CLM file from the following file list:') +print(lst_file) +print(' ') src_grd_file = data_dir + '../SODA_grid.cdf' src_grd = pyroms_toolbox.BGrid_SODA.get_nc_BGrid_SODA(src_grd_file, name='SODA_2.1.6_ARCTIC2', area='npolar', ystart=240) diff --git a/examples/Arctic2/make_ic_file.py b/examples/Arctic2/make_ic_file.py index ce64f24..e8d8099 100644 --- a/examples/Arctic2/make_ic_file.py +++ b/examples/Arctic2/make_ic_file.py @@ -2,7 +2,7 @@ matplotlib.use('Agg') import subprocess import os -import commands +import subprocess import numpy as np import pyroms @@ -15,9 +15,9 @@ file = '/nfs/P1/Data/SODA/SODA_2.1.6/SODA_2.1.6_20031231-20040105.cdf' dst_dir='./' -print 'Build IC file from the following file:' -print file -print ' ' +print('Build IC file from the following file:') +print(file) +print(' ') src_grd = pyroms_toolbox.BGrid_SODA.get_nc_BGrid_SODA('/nfs/P1/Data/SODA/SODA_2.1.6/SODA_grid.cdf', name='SODA_2.1.6', area='npolar', ystart=240) dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC2') diff --git a/examples/Arctic2/make_remap_weights_file.py b/examples/Arctic2/make_remap_weights_file.py index 
3348b98..2afa1c2 100644 --- a/examples/Arctic2/make_remap_weights_file.py +++ b/examples/Arctic2/make_remap_weights_file.py @@ -26,7 +26,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -43,7 +43,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -60,7 +60,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -77,5 +77,5 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') diff --git a/examples/Arctic2/remap.py b/examples/Arctic2/remap.py index ecdbad0..092bf50 100644 --- a/examples/Arctic2/remap.py +++ b/examples/Arctic2/remap.py @@ -47,7 +47,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_ic_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -107,7 +107,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -119,7 +119,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -127,34 +127,34 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: 
dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Arctic2/remap_bdry.py b/examples/Arctic2/remap_bdry.py index 57328f6..b3fce57 100644 --- a/examples/Arctic2/remap_bdry.py +++ b/examples/Arctic2/remap_bdry.py @@ -38,7 +38,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create boundary file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc' - print '\nCreating boundary file', dst_file + print('\nCreating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -139,7 +139,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -151,25 +151,25 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units nc.variables[dst_varname_east].field = field_east - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -177,28 +177,28 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' - print src_var.shape - print src_grd.z_t.shape + print('flood the grid') + print(src_var.shape) + print(src_grd.z_t.shape) src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from 
standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -218,7 +218,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/Arctic2/remap_bdry_uv.py b/examples/Arctic2/remap_bdry_uv.py index eee6eac..51c1f13 100644 --- a/examples/Arctic2/remap_bdry_uv.py +++ b/examples/Arctic2/remap_bdry_uv.py @@ -42,12 +42,12 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_bdry_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_bdry_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -80,87 +80,87 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' - print 'Creating variable u_east' + print('Creating variable u_east') ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' ncu.variables['u_east'].units = 'meter second-1' ncu.variables['u_east'].field = 'u_east, scalar, series' - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' ncu.variables['u_west'].field = 'u_east, scalar, series' # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable 
ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' ncu.variables['ubar_east'].units = 'meter second-1' ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' - print 'Creating variable v_east' + print('Creating variable v_east') ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' ncv.variables['v_east'].units = 'meter second-1' ncv.variables['v_east'].field = 'v_east, scalar, series' - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' ncv.variables['vbar_east'].units = 'meter second-1' ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable 
vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -169,27 +169,27 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -320,7 +320,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/Arctic2/remap_clm.py b/examples/Arctic2/remap_clm.py index 5f6c69a..3bf9589 100644 --- a/examples/Arctic2/remap_clm.py +++ b/examples/Arctic2/remap_clm.py @@ -47,7 +47,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_clim_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -110,7 +110,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, d field = 'salinity, scalar, series' vartime = 'ocean_time' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -122,7 +122,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -131,34 +131,34 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the 
grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Arctic2/remap_clm_uv.py b/examples/Arctic2/remap_clm_uv.py index 0d10c7a..980b813 100644 --- a/examples/Arctic2/remap_clm_uv.py +++ b/examples/Arctic2/remap_clm_uv.py @@ -48,12 +48,12 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_clim_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_clim_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -86,27 +86,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' ncu.variables['u'].time = 'ocean_time' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' ncu.variables['ubar'].time = 'ocean_time' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' ncv.variables['v'].time = 'ocean_time' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -115,27 +115,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # remaping - print 'remapping and 
rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -187,7 +187,7 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar @@ -196,10 +196,10 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ ncv.variables['v'][0] = dst_v ncv.variables['vbar'][0] = dst_vbar - print dst_u.shape - print dst_ubar.shape - print dst_v.shape - print dst_vbar.shape + print(dst_u.shape) + print(dst_ubar.shape) + print(dst_v.shape) + print(dst_vbar.shape) # close destination file ncu.close() diff --git a/examples/Arctic2/remap_uv.py b/examples/Arctic2/remap_uv.py index 1cf7765..fd218fa 100644 --- a/examples/Arctic2/remap_uv.py +++ b/examples/Arctic2/remap_uv.py @@ -40,12 +40,12 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_ic_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_ic_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -78,24 +78,24 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D 
u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -103,27 +103,27 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -175,7 +175,7 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar @@ -184,10 +184,10 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): ncv.variables['v'][0] = dst_v ncv.variables['vbar'][0] = dst_vbar - print dst_u.shape - print dst_ubar.shape - print dst_v.shape - print dst_vbar.shape + print(dst_u.shape) + print(dst_ubar.shape) + print(dst_v.shape) + print(dst_vbar.shape) # close destination file ncu.close() diff --git a/examples/Arctic_GLORYS/fixpole.py b/examples/Arctic_GLORYS/fixpole.py index e58988a..8d8e8ca 100644 --- a/examples/Arctic_GLORYS/fixpole.py +++ b/examples/Arctic_GLORYS/fixpole.py @@ -10,26 +10,26 @@ h = nc.variables['hice'][0,759:762,277:281] a = nc.variables['aice'][0,759:762,277:281] -print 'temp shape', t.shape +print('temp shape', t.shape) t1 = (t[:,0,1] + t[:,1,0] + t[:,2,2] + t[:,2,1] + t[:,0,2] + t[:,1,3])/6. s1 = (s[:,0,1] + s[:,1,0] + s[:,2,2] + s[:,2,1] + s[:,0,2] + s[:,1,3])/6. h1 = (h[0,1] + h[1,0] + h[2,2] + h[2,1] + h[0,2] + h[1,3])/6. a1 = (a[0,1] + a[1,0] + a[2,2] + a[2,1] + a[0,2] + a[1,3])/6. 
-print 'temp 0', t[:,1,1] -print 'temp 0a', t[:,1,2] -print 'temp 1', t[:,1,0] -print 'temp 2', t1 -print 'salt 0', s[:,1,1] -print 'salt 1', s[:,1,0] -print 'salt 2', s1 +print('temp 0', t[:,1,1]) +print('temp 0a', t[:,1,2]) +print('temp 1', t[:,1,0]) +print('temp 2', t1) +print('salt 0', s[:,1,1]) +print('salt 1', s[:,1,0]) +print('salt 2', s1) -print 'hice 0', h[1,1] -print 'hice 0a', h[1,2] -print 'hice 1', h[1,0] -print 'hice 2', h1 -print 'aice 0', a[1,1] -print 'aice 1', a[1,0] -print 'aice 2', a1 +print('hice 0', h[1,1]) +print('hice 0a', h[1,2]) +print('hice 1', h[1,0]) +print('hice 2', h1) +print('aice 0', a[1,1]) +print('aice 1', a[1,0]) +print('aice 2', a1) nc.variables['temp'][0,:,760,278] = t1 nc.variables['salt'][0,:,760,278] = s1 diff --git a/examples/Arctic_GLORYS/make_ic_file.py b/examples/Arctic_GLORYS/make_ic_file.py old mode 100755 new mode 100644 index 268022b..b304d18 --- a/examples/Arctic_GLORYS/make_ic_file.py +++ b/examples/Arctic_GLORYS/make_ic_file.py @@ -2,7 +2,7 @@ matplotlib.use('Agg') import subprocess import os -import commands +import subprocess import numpy as np import pyroms @@ -15,9 +15,9 @@ dst_dir='./' -print 'Build IC file from the following file:' -print file -print ' ' +print('Build IC file from the following file:') +print(file) +print(' ') src_grd = pyroms_toolbox.CGrid_GLORYS.get_nc_CGrid_GLORYS('/archive/u1/uaf/kate/GLORYS/GL2V1_mesh_mask_new.nc', name='GLORYS', area='npolar', ystart=690) dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC2') diff --git a/examples/Arctic_GLORYS/make_remap_weights_file.py b/examples/Arctic_GLORYS/make_remap_weights_file.py old mode 100755 new mode 100644 index 669e1ab..a6b7fab --- a/examples/Arctic_GLORYS/make_remap_weights_file.py +++ b/examples/Arctic_GLORYS/make_remap_weights_file.py @@ -32,7 +32,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -49,7 +49,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -66,7 +66,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -83,7 +83,7 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') # compute remap weights @@ -100,5 +100,5 @@ pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ interp_file1, interp_file2, map1_name, \ map2_name, num_maps, map_method, \ - grid1_periodic='.true.', grid2_periodic='.true.') + grid1_periodic='.true.', grid2_periodic='.true.') diff --git a/examples/Arctic_GLORYS/remap.py b/examples/Arctic_GLORYS/remap.py old mode 100755 new mode 100644 index 3aeedfd..f8b2171 --- a/examples/Arctic_GLORYS/remap.py +++ b/examples/Arctic_GLORYS/remap.py @@ -28,7 +28,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d ref = date2num(ref) # For IC tag = 
src_file.rsplit('/')[-1].rsplit('_')[2] - print("date string:", tag) + print("date string:", tag) year = int(tag[:4]) month = int(tag[4:6]) day = int(tag[6:]) @@ -40,7 +40,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_ic_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -74,7 +74,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d elif ndim == 2: src_var = src_var[:] src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] - print "dimensions:", src_var.shape, ndim + print("dimensions:", src_var.shape, ndim) if src_varname == 'sossheig': Bpos = 't' @@ -143,7 +143,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -155,7 +155,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -163,36 +163,36 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.CGrid_GLORYS.flood(src_var, src_grd, Cpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) - print 'flooded the grid', src_varz[:,-1,189] - print 'flooded the grid', src_varz[:,-1,277] + print('flooded the grid', src_varz[:,-1,189]) + print('flooded the grid', src_varz[:,-1,277]) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Arctic_GLORYS/remap_bdry.py b/examples/Arctic_GLORYS/remap_bdry.py old mode 100755 new mode 100644 index f87f88d..4941bfa --- a/examples/Arctic_GLORYS/remap_bdry.py +++ b/examples/Arctic_GLORYS/remap_bdry.py @@ -27,7 +27,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, ref = datetime(1900, 1, 1, 0, 0, 0) ref = date2num(ref) tag = src_file.rsplit('/')[-1].rsplit('_')[2] - print("date string:", tag) + print("date string:", tag) year = int(tag[:4]) month =
int(tag[4:6]) day = int(tag[6:]) @@ -39,7 +39,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create boundary file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc' - print '\nCreating boundary file', dst_file + print('\nCreating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -149,7 +149,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -161,25 +161,25 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units nc.variables[dst_varname_east].field = field_east - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -187,26 +187,26 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.CGrid_GLORYS.flood(src_var, src_grd, Cpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -226,7 +226,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') 
nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/Arctic_GLORYS/remap_bdry_uv.py b/examples/Arctic_GLORYS/remap_bdry_uv.py old mode 100755 new mode 100644 index bca375e..611875d --- a/examples/Arctic_GLORYS/remap_bdry_uv.py +++ b/examples/Arctic_GLORYS/remap_bdry_uv.py @@ -28,7 +28,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. ref = datetime(1900, 1, 1, 0, 0, 0) ref = date2num(ref) tag = src_file.rsplit('/')[-1].rsplit('_')[2] - print("tag:", tag) + print("tag:", tag) year = int(tag[:4]) month = int(tag[4:6]) day = int(tag[6:]) @@ -43,12 +43,12 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_bdry_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_bdry_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -68,13 +68,13 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # ARCTIC2 grid sub-sample src_varu = src_varu[:] src_varv = src_varv[:] - print "shape 1", src_varu.shape, src_varv.shape + print("shape 1", src_varu.shape, src_varv.shape) src_varu = np.squeeze(src_varu) src_varv = np.squeeze(src_varv) - print "shape 2", src_varu.shape, src_varv.shape + print("shape 2", src_varu.shape, src_varv.shape) src_varu = src_varu[:,np.r_[ystart:np.size(src_varu,1),-1],:] src_varv = src_varv[:,np.r_[ystart:np.size(src_varv,1),-1],:] - print "shape 3", src_varu.shape, src_varv.shape + print("shape 3", src_varu.shape, src_varv.shape) # get weights file wts_file_a = 'remap_weights_GLORYS_to_ARCTIC2_bilinear_t_to_rho.nc' @@ -88,87 +88,87 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='.
dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' - print 'Creating variable u_east' + print('Creating variable u_east') ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' ncu.variables['u_east'].units = 'meter second-1' ncu.variables['u_east'].field = 'u_east, scalar, series' - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' ncu.variables['u_west'].field = 'u_east, scalar, series' # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' ncu.variables['ubar_east'].units = 'meter second-1' ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' 
ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' - print 'Creating variable v_east' + print('Creating variable v_east') ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' ncv.variables['v_east'].units = 'meter second-1' ncv.variables['v_east'].field = 'v_east, scalar, series' - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' ncv.variables['vbar_east'].units = 'meter second-1' ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -177,27 +177,27 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.CGrid_GLORYS.flood(src_varu, src_grd, Cpos='u', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.CGrid_GLORYS.flood(src_varv, src_grd, Cpos='v', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file_u, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file_v, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -329,7 +329,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. 
dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/Arctic_GLORYS/remap_uv.py b/examples/Arctic_GLORYS/remap_uv.py old mode 100755 new mode 100644 index a7cb5f6..de156fa --- a/examples/Arctic_GLORYS/remap_uv.py +++ b/examples/Arctic_GLORYS/remap_uv.py @@ -26,7 +26,7 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): ref = datetime(1900, 1, 1, 0, 0, 0) ref = date2num(ref) tag = src_file.rsplit('/')[-1].rsplit('_')[2] - print("tag:", tag) + print("tag:", tag) year = int(tag[:4]) month = int(tag[4:6]) day = int(tag[6:]) @@ -41,12 +41,12 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_ic_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_ic_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -59,7 +59,7 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): cdf = netCDF.Dataset(src_file) src_varu = cdf.variables['vozocrtx'] src_varv = cdf.variables['vomecrty'] - print "dims", src_varu.dimensions, src_varv.dimensions + print("dims", src_varu.dimensions, src_varv.dimensions) #get missing value spval = src_varu._FillValue @@ -67,13 +67,13 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # ARCTIC grid sub-sample src_varu = src_varu[:] src_varv = src_varv[:] - print "shape 1", src_varu.shape, src_varv.shape + print("shape 1", src_varu.shape, src_varv.shape) src_varu = np.squeeze(src_varu) src_varv = np.squeeze(src_varv) - print "shape 2", src_varu.shape, src_varv.shape + print("shape 2", src_varu.shape, src_varv.shape) src_varu = src_varu[:,np.r_[ystart:np.size(src_varu,1),-1],:] src_varv = src_varv[:,np.r_[ystart:np.size(src_varv,1),-1],:] - print "shape 3", src_varu.shape, src_varv.shape + print("shape 3", src_varu.shape, src_varv.shape) # get weights file wts_file_a = 'remap_weights_GLORYS_to_ARCTIC2_bilinear_t_to_rho.nc' @@ -87,24 +87,24 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field =
'ubar-velocity,, scalar, series' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -112,27 +112,27 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid', src_varu.shape + print('flood the grid', src_varu.shape) src_uz = pyroms_toolbox.CGrid_GLORYS.flood(src_varu, src_grd, Cpos='u', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.CGrid_GLORYS.flood(src_varv, src_grd, Cpos='v', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file_u, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file_v, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -185,7 +185,7 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar @@ -194,10 +194,10 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): ncv.variables['v'][0] = dst_v ncv.variables['vbar'][0] = dst_vbar - print dst_u.shape - print dst_ubar.shape - print dst_v.shape - print dst_vbar.shape + print(dst_u.shape) + print(dst_ubar.shape) + print(dst_v.shape) + print(dst_vbar.shape) # close destination file ncu.close() diff --git a/examples/Arctic_HYCOM/fixt.py b/examples/Arctic_HYCOM/fixt.py index 3214db9..86d50c9 100644 --- a/examples/Arctic_HYCOM/fixt.py +++ b/examples/Arctic_HYCOM/fixt.py @@ -41,13 +41,13 @@ for i in range(706,898): if np.fabs(ubar[0,j,i]) > 10: if np.fabs(ubar[0,j,i+1]) > 10: - ubar[0,j,i] = 2/3.0*ubar[0,j,i-1]+1/3.0*ubar[0,j,i+2] - ubar[0,j,i+1] = 1/3.0*ubar[0,j,i-1]+2/3.0*ubar[0,j,i+2] - u[0,:,j,i] = 2/3.0*u[0,:,j,i-1]+1/3.0*u[0,:,j,i+2] - u[0,:,j,i+1] = 1/3.0*u[0,:,j,i-1]+2/3.0*u[0,:,j,i+2] - else: - ubar[0,j,i] = 0.5*(ubar[0,j,i-1]+ubar[0,j,i+1]) - u[0,:,j,i] = 0.5*(u[0,:,j,i-1]+u[0,:,j,i+1]) + ubar[0,j,i] = 2/3.0*ubar[0,j,i-1]+1/3.0*ubar[0,j,i+2] + ubar[0,j,i+1] = 1/3.0*ubar[0,j,i-1]+2/3.0*ubar[0,j,i+2] + u[0,:,j,i] = 2/3.0*u[0,:,j,i-1]+1/3.0*u[0,:,j,i+2] + u[0,:,j,i+1] = 1/3.0*u[0,:,j,i-1]+2/3.0*u[0,:,j,i+2] + else: + ubar[0,j,i] = 0.5*(ubar[0,j,i-1]+ubar[0,j,i+1]) + 
u[0,:,j,i] = 0.5*(u[0,:,j,i-1]+u[0,:,j,i+1]) vbar = root.variables['vbar'][:] @@ -66,13 +66,13 @@ for i in range(708,900): if np.fabs(vbar[0,j,i]) > 10: if np.fabs(vbar[0,j,i+1]) > 10: - vbar[0,j,i] = 2/3.0*vbar[0,j,i-1]+1/3.0*vbar[0,j,i+2] - vbar[0,j,i+1] = 1/3.0*vbar[0,j,i-1]+2/3.0*vbar[0,j,i+2] - v[0,:,j,i] = 2/3.0*v[0,:,j,i-1]+1/3.0*v[0,:,j,i+2] - v[0,:,j,i+1] = 1/3.0*v[0,:,j,i-1]+2/3.0*v[0,:,j,i+2] - else: - vbar[0,j,i] = 0.5*(vbar[0,j,i-1]+vbar[0,j,i+1]) - v[0,:,j,i] = 0.5*(v[0,:,j,i-1]+v[0,:,j,i+1]) + vbar[0,j,i] = 2/3.0*vbar[0,j,i-1]+1/3.0*vbar[0,j,i+2] + vbar[0,j,i+1] = 1/3.0*vbar[0,j,i-1]+2/3.0*vbar[0,j,i+2] + v[0,:,j,i] = 2/3.0*v[0,:,j,i-1]+1/3.0*v[0,:,j,i+2] + v[0,:,j,i+1] = 1/3.0*v[0,:,j,i-1]+2/3.0*v[0,:,j,i+2] + else: + vbar[0,j,i] = 0.5*(vbar[0,j,i-1]+vbar[0,j,i+1]) + v[0,:,j,i] = 0.5*(v[0,:,j,i-1]+v[0,:,j,i+1]) root.variables['ubar'][:] = ubar root.variables['u'][:] = u diff --git a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_salt_2014.py b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_salt_2014.py index 294f6a3..257a231 100644 --- a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_salt_2014.py +++ b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_salt_2014.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -92,7 +92,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): daysinyear = 94 for day in range(1,daysinyear+1): #for day in range(95,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) #get data from server @@ -101,9 +101,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -115,10 +115,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) #get data from server @@ -127,9 +127,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' 
+ print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_ssh_2014.py b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_ssh_2014.py index 086ed6d..5958b2e 100644 --- a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_ssh_2014.py +++ b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_ssh_2014.py @@ -48,7 +48,7 @@ def create_HYCOM_file(name, time, lon, lat, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -82,7 +82,7 @@ def create_HYCOM_file(name, time, lon, lat, var): daysinyear = 94 for day in range(1,daysinyear+1): #for day in range(95,daysinyear+1): - print 'Processing file for day %03d, year %04d' %(day, year) + print('Processing file for day %03d, year %04d' %(day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/2d/archv.%04d_%03d_00_2d.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/2d/archv.%04d_%03d_00_2d.nc' %(year,day) #get data from server @@ -92,7 +92,7 @@ def create_HYCOM_file(name, time, lon, lat, var): spval = var.get_fill_value() dataset.close() except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -104,10 +104,10 @@ def create_HYCOM_file(name, time, lon, lat, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for day %03d, year %04d' %(day, year) + print('Retry file for day %03d, year %04d' %(day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/2d/archv.%04d_%03d_00_2d.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/2d/archv.%04d_%03d_00_2d.nc' %(year,day) #get data from server @@ -117,7 +117,7 @@ def create_HYCOM_file(name, time, lon, lat, var): spval = var.get_fill_value() dataset.close() except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_temp_2014.py b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_temp_2014.py index 3af6443..09969cb 100644 --- a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_temp_2014.py +++ b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_temp_2014.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -92,7 +92,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): daysinyear = 94 for day in range(1,daysinyear+1): #for day in range(95,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) #get data from server @@ -101,9 +101,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -115,10 +115,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) #get data from server @@ -127,9 +127,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_u_2014.py b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_u_2014.py index 682e00d..bf7813e 100644 --- a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_u_2014.py +++ b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_u_2014.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -92,7 +92,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): daysinyear = 94 for day in range(1,daysinyear+1): #for day in range(95,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) #get data from server @@ -101,9 +101,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -115,10 +115,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) #get data from server @@ -127,9 +127,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_v_2014.py b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_v_2014.py index 122675f..abbb7d2 100644 --- a/examples/Arctic_HYCOM/get_hycom_GLBa0.08_v_2014.py +++ b/examples/Arctic_HYCOM/get_hycom_GLBa0.08_v_2014.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -92,7 +92,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): daysinyear = 94 for day in range(1,daysinyear+1): #for day in range(95,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) #get data from server @@ -101,9 +101,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -115,10 +115,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.0/2014/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) # url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2014/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) #get data from server @@ -127,9 +127,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,2100:,550:4040] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Arctic_HYCOM/make_bdry_file.py b/examples/Arctic_HYCOM/make_bdry_file.py index 8028fa8..d1b5512 100644 --- a/examples/Arctic_HYCOM/make_bdry_file.py +++ b/examples/Arctic_HYCOM/make_bdry_file.py @@ -4,7 +4,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np from multiprocessing import Pool #import pdb @@ -29,14 +29,14 @@ for year in lst_year: # lst = commands.getoutput('ls ' + data_dir + 'HYCOM_GLBa0.08_' + year + '*') - lst = commands.getoutput('ls ' + data_dir + 'HYCOM_GLBa0.08_' + year + '_0*') + lst = subprocess.getoutput('ls ' + data_dir + 'HYCOM_GLBa0.08_' + year + '_0*') # lst = commands.getoutput('ls ' + data_dir + 'HYCOM_GLBa0.08_' + year + '_0[4-9]*') lst = lst.split() lst_file = lst_file + lst -print 'Build OBC file from the following file list:' -print lst_file -print ' ' +print('Build OBC file from the following file list:') +print(lst_file) +print(' ') src_grd_file = data_dir + '../HYCOM_GLBa0.08_North_grid2.nc' src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM(src_grd_file) diff --git a/examples/Arctic_HYCOM/make_clm_file.py b/examples/Arctic_HYCOM/make_clm_file.py index b4afc36..8de0bc9 100644 --- a/examples/Arctic_HYCOM/make_clm_file.py +++ b/examples/Arctic_HYCOM/make_clm_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np from datetime import datetime import matplotlib @@ -23,13 +23,13 @@ for year in lst_year: year = np.str(year) # lst = commands.getoutput('ls ' + data_dir + 'SODA_2.1.6_' + year + '_0*') - lst = commands.getoutput('ls ' + data_dir + '*' + year + '*') + lst = subprocess.getoutput('ls ' + data_dir + '*' + year + '*') lst = lst.split() lst_file = lst_file + lst -print 'Build CLM file from the following file list:' -print lst_file -print ' ' +print('Build CLM file from the following file list:') +print(lst_file) +print(' ') src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM('/archive/u1/uaf/kate/HYCOM/Svalbard/HYCOM_GLBa0.08_North_grid2.nc') dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC2') @@ -47,26 +47,26 @@ out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_ssh_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-O', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_temp_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_salt_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_u_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_v_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) diff --git a/examples/Arctic_HYCOM/make_ic_file.py b/examples/Arctic_HYCOM/make_ic_file.py index a4f2c45..55d2ac2 100644 --- a/examples/Arctic_HYCOM/make_ic_file.py +++ b/examples/Arctic_HYCOM/make_ic_file.py @@ -1,6 +1,6 @@ import subprocess import os -import commands +import 
subprocess import numpy as np import matplotlib matplotlib.use('Agg') @@ -15,9 +15,9 @@ file = '/archive/u1/uaf/kate/HYCOM/Svalbard/data/HYCOM_GLBa0.08_2009_001.nc' dst_dir='./' -print 'Build IC file from the following file:' -print file -print ' ' +print('Build IC file from the following file:') +print(file) +print(' ') src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM('/archive/u1/uaf/kate/HYCOM/Svalbard/HYCOM_GLBa0.08_North_grid2.nc') dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC2') @@ -34,26 +34,26 @@ out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_ssh_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-O', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_temp_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_salt_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_u_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_v_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) diff --git a/examples/Arctic_HYCOM/remap.py b/examples/Arctic_HYCOM/remap.py index 38e9238..55560dd 100644 --- a/examples/Arctic_HYCOM/remap.py +++ b/examples/Arctic_HYCOM/remap.py @@ -41,7 +41,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_ic_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -95,7 +95,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -107,7 +107,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -115,34 +115,34 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = 
pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Arctic_HYCOM/remap_bdry.py b/examples/Arctic_HYCOM/remap_bdry.py index 8ff2232..62d2d31 100644 --- a/examples/Arctic_HYCOM/remap_bdry.py +++ b/examples/Arctic_HYCOM/remap_bdry.py @@ -18,7 +18,7 @@ class nctime(object): def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='./'): - print src_file + print(src_file) # get time nctime.long_name = 'time' @@ -27,7 +27,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # create boundary file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc' - print '\nCreating boundary file', dst_file + print('\nCreating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -39,7 +39,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] time = cdf.variables['ocean_time'][0] - print time + print(time) #get missing value spval = src_var._FillValue @@ -124,7 +124,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -136,25 +136,25 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units nc.variables[dst_varname_east].field = field_east - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -162,26 +162,26 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # remapping - print 'remapping', dst_varname, 'from', 
src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, :], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -201,7 +201,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/Arctic_HYCOM/remap_bdry_uv.py b/examples/Arctic_HYCOM/remap_bdry_uv.py index 5bf4c69..ad093f8 100644 --- a/examples/Arctic_HYCOM/remap_bdry_uv.py +++ b/examples/Arctic_HYCOM/remap_bdry_uv.py @@ -28,12 +28,12 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_bdry_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_bdry_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -64,95 +64,95 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. 
dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' - print 'Creating variable u_east' + print('Creating variable u_east') ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' ncu.variables['u_east'].units = 'meter second-1' ncu.variables['u_east'].field = 'u_east, scalar, series' - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' ncu.variables['u_west'].field = 'u_east, scalar, series' # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' ncu.variables['ubar_east'].units = 'meter second-1' ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' 
ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' - print 'Creating variable v_east' + print('Creating variable v_east') ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' ncv.variables['v_east'].units = 'meter second-1' ncv.variables['v_east'].field = 'v_east, scalar, series' - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' ncv.variables['vbar_east'].units = 'meter second-1' ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -161,27 +161,27 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -325,7 +325,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. 
dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/Arctic_HYCOM/remap_clm.py b/examples/Arctic_HYCOM/remap_clm.py index c5aafe5..3505435 100644 --- a/examples/Arctic_HYCOM/remap_clm.py +++ b/examples/Arctic_HYCOM/remap_clm.py @@ -41,7 +41,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_clim_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -72,7 +72,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'free-surface' units = 'meter' field = 'free-surface, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' elif src_varname == 'temp': pos = 't' Cpos = 'rho' @@ -84,7 +84,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'potential temperature' units = 'Celsius' field = 'temperature, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' elif src_varname == 'salt': pos = 't' Cpos = 'rho' @@ -96,9 +96,9 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'salinity' units = 'PSU' field = 'salinity, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -110,7 +110,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -119,34 +119,34 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git 
a/examples/Arctic_HYCOM/remap_clm_uv.py b/examples/Arctic_HYCOM/remap_clm_uv.py index ede5f9a..e91f265 100644 --- a/examples/Arctic_HYCOM/remap_clm_uv.py +++ b/examples/Arctic_HYCOM/remap_clm_uv.py @@ -43,12 +43,12 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_clim_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_clim_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -77,27 +77,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' ncu.variables['u'].time = 'ocean_time' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' ncu.variables['ubar'].time = 'ocean_time' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' ncv.variables['v'].time = 'ocean_time' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -106,27 +106,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to 
sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -181,7 +181,7 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/Arctic_HYCOM/remap_uv.py b/examples/Arctic_HYCOM/remap_uv.py index 0d48fc3..1c871c9 100644 --- a/examples/Arctic_HYCOM/remap_uv.py +++ b/examples/Arctic_HYCOM/remap_uv.py @@ -43,12 +43,12 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_ic_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_ic_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -77,24 +77,24 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -102,27 +102,27 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, 
src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -177,7 +177,7 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/Arctic_SODA3.3.1/make_bdry_file.py b/examples/Arctic_SODA3.3.1/make_bdry_file.py new file mode 100644 index 0000000..1821570 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/make_bdry_file.py @@ -0,0 +1,71 @@ +import matplotlib +matplotlib.use('Agg') +import subprocess +import os +import sys +import numpy as np + +#increase the maximum number of open files allowed +#import resource +#resource.setrlimit(resource.RLIMIT_NOFILE, (3000,-1)) + +import pyroms +import pyroms_toolbox + +from remap_bdry import remap_bdry +from remap_bdry_uv import remap_bdry_uv + +my_year=int(sys.argv[-1]) + +data_dir = '/archive/u1/uaf/AKWATERS/kshedstrom/SODA/' +data_dir_year = data_dir + str(my_year) + '/' +dst_dir='bdry/' + +filelst = subprocess.check_output(['ls', data_dir_year]).decode().replace('\n',' ').split() + +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(285, 500), yrange=(180, 300) ) +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', area=npolar, ystart=236) +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'SODA3_0.5deg_grid.nc', name='SODA3.3.1', area='npolar') +dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4') + +for filein in filelst: + tag=filein.replace('soda3.3.1_5dy_ocean_reg_','').replace('.nc','') + print('\nBuild OBC file for time %s' %filein) + zeta_dst_file = dst_dir + dst_grd.name + '_bdry_zeta_' + tag + '_' + src_grd.name + '.nc' + temp_dst_file = dst_dir + dst_grd.name + '_bdry_temp_' + tag + '_' + src_grd.name + '.nc' + salt_dst_file = dst_dir + dst_grd.name + '_bdry_salt_' + tag + '_' + src_grd.name + '.nc' + u_dst_file = dst_dir + dst_grd.name + '_bdry_u_' + tag + '_' + src_grd.name + '.nc' + v_dst_file = dst_dir + dst_grd.name + '_bdry_v_' + tag + '_' + src_grd.name + '.nc' + + # remap ssh + zeta = remap_bdry('ssh', data_dir_year + filein, src_grd, dst_grd, zeta_dst_file, dst_dir=dst_dir) + + # reload grid with zeta (more accurate) + dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4', zeta=zeta) + + # regrid temp, salt and velocities + remap_bdry('temp',data_dir_year + filein, src_grd, dst_grd, temp_dst_file, dst_dir=dst_dir) + remap_bdry('salt',data_dir_year + filein, src_grd, dst_grd, salt_dst_file, dst_dir=dst_dir) + remap_bdry_uv(data_dir_year + filein, src_grd, dst_grd, u_dst_file, v_dst_file, dst_dir=dst_dir) + + # merge file
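+ # 'ncks -A' appends the temp/salt/u/v files into the zeta file, leaving a single + # combined boundary file per time stamp in dst_dir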
+ bdry_file = dst_dir + dst_grd.name + '_bdry_' + tag + '_' + src_grd.name + '.nc' + + command1 = 'mv ' + zeta_dst_file + ' ' + bdry_file + command2 = 'ncks -A ' + temp_dst_file + ' -o ' + bdry_file + command3 = 'ncks -A ' + salt_dst_file + ' -o ' + bdry_file + command4 = 'ncks -A ' + u_dst_file + ' -o ' + bdry_file + command5 = 'ncks -A ' + v_dst_file + ' -o ' + bdry_file + + subprocess.call(command1,shell=True) + subprocess.call(command2,shell=True) + subprocess.call(command3,shell=True) + subprocess.call(command4,shell=True) + subprocess.call(command5,shell=True) + + # clean up + os.remove(temp_dst_file) + os.remove(salt_dst_file) + os.remove(u_dst_file) + os.remove(v_dst_file) diff --git a/examples/Arctic_SODA3.3.1/make_clm_file.py b/examples/Arctic_SODA3.3.1/make_clm_file.py new file mode 100644 index 0000000..8787185 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/make_clm_file.py @@ -0,0 +1,75 @@ +import matplotlib +matplotlib.use('Agg') +import subprocess +import os +import sys +import numpy as np + +#increase the maximum number of open files allowed +#import resource +#resource.setrlimit(resource.RLIMIT_NOFILE, (3000,-1)) + +import pyroms +import pyroms_toolbox + +from remap import remap +from remap_uv import remap_uv + +my_year=int(sys.argv[-1]) + +data_dir = '/archive/u1/uaf/AKWATERS/kshedstrom/SODA/' +data_dir_year = data_dir + '/monthly/*' + str(my_year) + '*' +dst_dir='clm/' + +filelst = subprocess.getoutput('ls ' + data_dir_year) +filelst = filelst.split() +#filelst = subprocess.check_output(['ls', data_dir_year]).replace('/n',' ').split() + +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(285, 500), yrange=(180, 300) ) +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', area=npolar, ystart=236) +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'SODA3_0.5deg_grid.nc', name='SODA3.3.1', area='npolar') +dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4') +print(src_grd.name) +print(dst_grd.name) + +for filein in filelst: + tag=filein.replace(data_dir+'/monthly/','').replace('soda3.3.1_monthly_ocean_reg_','').replace('.nc','') + print('\nBuild OBC file for time %s' %tag) + zeta_dst_file = dst_dir + dst_grd.name + '_clm_zeta_' + tag + '_' + src_grd.name + '.nc' + temp_dst_file = dst_dir + dst_grd.name + '_clm_temp_' + tag + '_' + src_grd.name + '.nc' + salt_dst_file = dst_dir + dst_grd.name + '_clm_salt_' + tag + '_' + src_grd.name + '.nc' + u_dst_file = dst_dir + dst_grd.name + '_clm_u_' + tag + '_' + src_grd.name + '.nc' + v_dst_file = dst_dir + dst_grd.name + '_clm_v_' + tag + '_' + src_grd.name + '.nc' + + # remap ssh + zeta = remap('ssh', filein, src_grd, dst_grd, zeta_dst_file, dst_dir=dst_dir) + + # reload grid with zeta (more accurate) + dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4', zeta=zeta) + + # regrid temp, salt and velocities + remap('temp', filein, src_grd, dst_grd, temp_dst_file, dst_dir=dst_dir) + remap('salt', filein, src_grd, dst_grd, salt_dst_file, dst_dir=dst_dir) + remap_uv(filein, src_grd, dst_grd, u_dst_file, v_dst_file, dst_dir=dst_dir) + + # merge file + clm_file = dst_dir + dst_grd.name + '_clm_' + tag + '_' + src_grd.name + '.nc' + + command1 = 'mv ' + zeta_dst_file + ' ' + clm_file + command2 = 'ncks -A ' + temp_dst_file + ' -o ' + clm_file + command3 = 'ncks -A ' + salt_dst_file + ' -o ' + clm_file + command4 = 'ncks -A ' + u_dst_file + ' -o ' + clm_file + command5 = 'ncks -A ' + v_dst_file + ' -o ' + clm_file +
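+ # the ncks tool (from NCO) must be on the PATH for the append commands below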
+ subprocess.call(command1,shell=True) + subprocess.call(command2,shell=True) + subprocess.call(command3,shell=True) + subprocess.call(command4,shell=True) + subprocess.call(command5,shell=True) + + # clean up + os.remove(temp_dst_file) + os.remove(salt_dst_file) + os.remove(u_dst_file) + os.remove(v_dst_file) diff --git a/examples/Arctic_SODA3.3.1/make_ini_file.py b/examples/Arctic_SODA3.3.1/make_ini_file.py new file mode 100644 index 0000000..951f7f3 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/make_ini_file.py @@ -0,0 +1,76 @@ +import matplotlib +matplotlib.use('Agg') +import subprocess +import os +import sys +import numpy as np + +#increase the maximum number of open files allowed +#import resource +#resource.setrlimit(resource.RLIMIT_NOFILE, (3000,-1)) + +import pyroms +import pyroms_toolbox + +from remap import remap +from remap_uv import remap_uv + +#my_year=int(sys.argv[-1]) +my_year = '2010' + +data_dir = '/archive/AKWATERS/kshedstrom/SODA/' +data_dir_year = data_dir + '/' + str(my_year) + '/soda3.3.1_5dy_ocean_reg_2010_12_26.nc' +dst_dir='./' + +filelst = subprocess.getoutput('ls ' + data_dir_year) +filelst = filelst.split() +#filelst = subprocess.check_output(['ls', data_dir_year]).replace('/n',' ').split() + +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(285, 500), yrange=(180, 300) ) +#src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', area=npolar, ystart=236) +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'SODA3_0.5deg_grid.nc', name='SODA3.3.1', area='npolar') +dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4') +print(src_grd.name) +print(dst_grd.name) + +for filein in filelst: + tag=filein.replace(data_dir+'/'+my_year+'/','').replace('soda3.3.1_5dy_ocean_reg_','').replace('.nc','') + print('\nBuild OBC file for time %s' %tag) + zeta_dst_file = dst_dir + dst_grd.name + '_IC_zeta_' + tag + '_' + src_grd.name + '.nc' + temp_dst_file = dst_dir + dst_grd.name + '_IC_temp_' + tag + '_' + src_grd.name + '.nc' + salt_dst_file = dst_dir + dst_grd.name + '_IC_salt_' + tag + '_' + src_grd.name + '.nc' + u_dst_file = dst_dir + dst_grd.name + '_IC_u_' + tag + '_' + src_grd.name + '.nc' + v_dst_file = dst_dir + dst_grd.name + '_IC_v_' + tag + '_' + src_grd.name + '.nc' + + # remap ssh + zeta = remap('ssh', filein, src_grd, dst_grd, zeta_dst_file, dst_dir=dst_dir) + + # reload grid with zeta (more accurate) + dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4', zeta=zeta) + + # regrid temp, salt and velocities + remap('temp', filein, src_grd, dst_grd, temp_dst_file, dst_dir=dst_dir) + remap('salt', filein, src_grd, dst_grd, salt_dst_file, dst_dir=dst_dir) + remap_uv(filein, src_grd, dst_grd, u_dst_file, v_dst_file, dst_dir=dst_dir) + + # merge file + IC_file = dst_dir + dst_grd.name + '_IC_' + tag + '_' + src_grd.name + '.nc' + + command1 = 'mv ' + zeta_dst_file + ' ' + IC_file + command2 = 'ncks -A ' + temp_dst_file + ' -o ' + IC_file + command3 = 'ncks -A ' + salt_dst_file + ' -o ' + IC_file + command4 = 'ncks -A ' + u_dst_file + ' -o ' + IC_file + command5 = 'ncks -A ' + v_dst_file + ' -o ' + IC_file + + subprocess.call(command1,shell=True) + subprocess.call(command2,shell=True) + subprocess.call(command3,shell=True) + subprocess.call(command4,shell=True) + subprocess.call(command5,shell=True) + + # clean up + os.remove(temp_dst_file) + os.remove(salt_dst_file) + os.remove(u_dst_file) + os.remove(v_dst_file) diff --git
a/examples/Arctic_SODA3.3.1/make_remap_weights_file.py b/examples/Arctic_SODA3.3.1/make_remap_weights_file.py new file mode 100644 index 0000000..eb884e6 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/make_remap_weights_file.py @@ -0,0 +1,83 @@ +import pyroms +import pyroms_toolbox + +# load the grid +#srcgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/Volumes/P1/Data/SODA/SODA_3.3.1/grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(285, 500), yrange=(180, 300)) +srcgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/archive/u1/uaf/AKWATERS/kshedstrom/SODA/SODA3_0.5deg_grid.nc', \ + name='SODA3.3.1', area='npolar', ystart=235) +dstgrd = pyroms.grid.get_ROMS_grid('ARCTIC4') + +# make remap grid file for scrip +pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(srcgrd, Bpos='t') +pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(srcgrd, Bpos='uv') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='u') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='v') + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_' + srcgrd.name + '_t.nc' +grid2_file = 'remap_grid_' + dstgrd.name + '_rho.nc' +interp_file1 = 'remap_weights_' + srcgrd.name + '_to_' + dstgrd.name + '_bilinear_t_to_rho.nc' +interp_file2 = 'remap_weights_' + dstgrd.name + '_to_' + srcgrd.name + '_bilinear_rho_to_t.nc' +map1_name = srcgrd.name + ' to ' + dstgrd.name + ' Bilinear Mapping' +map2_name = dstgrd.name + ' to ' + srcgrd.name + ' Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method, \ + grid1_periodic='.true.', grid2_periodic='.true.') + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_' + srcgrd.name + '_uv.nc' +grid2_file = 'remap_grid_' + dstgrd.name + '_rho.nc' +interp_file1 = 'remap_weights_' + srcgrd.name + '_to_' + dstgrd.name + '_bilinear_uv_to_rho.nc' +interp_file2 = 'remap_weights_' + dstgrd.name + '_to_' + srcgrd.name + '_bilinear_rho_to_uv.nc' +map1_name = srcgrd.name + ' to ' + dstgrd.name + ' Bilinear Mapping' +map2_name = dstgrd.name + ' to ' + srcgrd.name + ' Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method, \ + grid1_periodic='.true.', grid2_periodic='.true.') + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_' + srcgrd.name + '_t.nc' +grid2_file = 'remap_grid_' + dstgrd.name + '_u.nc' +interp_file1 = 'remap_weights_' + srcgrd.name + '_to_' + dstgrd.name + '_bilinear_t_to_u.nc' +interp_file2 = 'remap_weights_' + dstgrd.name + '_to_' + srcgrd.name + '_bilinear_u_to_t.nc' +map1_name = srcgrd.name + ' to ' + dstgrd.name + ' Bilinear Mapping' +map2_name = dstgrd.name + ' to ' + srcgrd.name + ' Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method, \ + grid1_periodic='.true.', grid2_periodic='.true.') + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_' + srcgrd.name + '_t.nc' +grid2_file = 'remap_grid_' + dstgrd.name + '_v.nc' +interp_file1 = 
'remap_weights_' + srcgrd.name + '_to_' + dstgrd.name + '_bilinear_t_to_v.nc' +interp_file2 = 'remap_weights_' + dstgrd.name + '_to_' + srcgrd.name + '_bilinear_v_to_t.nc' +map1_name = srcgrd.name + ' to ' + dstgrd.name + ' Bilinear Mapping' +map2_name = dstgrd.name + ' to ' + srcgrd.name + ' Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method, \ + grid1_periodic='.true.', grid2_periodic='.true.') + diff --git a/examples/Arctic_SODA3.3.1/remap.py b/examples/Arctic_SODA3.3.1/remap.py new file mode 100644 index 0000000..ee0daa5 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/remap.py @@ -0,0 +1,165 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +import datetime as dt +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample +# xrange=src_grd.xrange; yrange=src_grd.yrange + ystart=235 + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # create IC/CLM file + print 'Creating ROMS file', dst_file + if os.path.exists(dst_file) is True: + os.remove(dst_file) + pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) + + # open IC file + nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') + + #load var + cdf = netCDF.Dataset(src_file) + src_var = cdf.variables[src_varname] + + tmp = cdf.variables['time'][:] + if len(tmp) > 1: + print 'error : multiple frames in input file' ; exit() + else: + time = tmp[0] + + # we need to correct the time axis + ref_soda = dt.datetime(1980,1,1,0,0) + ref_roms = dt.datetime(1900,1,1,0,0) + ndays = (ref_soda - ref_roms).days + time = time + ndays + + #get missing value + spval = src_var.missing_value + + # determine variable dimension + ndim = len(src_var.dimensions) - 1 + + # ARCTIC4 grid sub-sample + if ndim == 3: +# src_var = src_var[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid 3d', src_var.shape + src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[:,np.r_[ystart:np.size(src_var,1),-1],:] + print 'subgrid 3d', src_var.shape + elif ndim == 2: +# src_var = src_var[0,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid 2d', src_var.shape + src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] + print 'subgrid 2d', src_var.shape + + + vartime = 'ocean_time' + if src_varname == 'ssh': + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'zeta' + dimensions = ('ocean_time', 'eta_rho', 'xi_rho') + long_name = 'free-surface' + units = 'meter' + field = 'zeta, scalar, series' + elif src_varname == 'temp': + src_var = src_var + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'temp' + dimensions = ('ocean_time', 's_rho', 'eta_rho', 'xi_rho') + long_name = 'potential temperature' + units = 'Celsius' + field = 'temp, scalar, series' + elif src_varname == 'salt': + Bpos = 't' + Cpos = 'rho' + 
z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'salt' + dimensions = ('ocean_time', 's_rho', 'eta_rho', 'xi_rho') + long_name = 'salinity' + units = 'none' + field = 'salt_north, scalar, series' + else: + raise ValueError, 'Undefined src_varname' + + + if ndim == 3: + # build intermediate zgrid + zlevel = -z[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + + # create variable in file + print 'Creating variable', dst_varname + nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) + nc.variables[dst_varname].long_name = long_name + nc.variables[dst_varname].units = units + nc.variables[dst_varname].field = field + nc.variables[dst_varname].time = 'ocean_time' + + # remapping + print 'remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name + + if ndim == 3: + # flood the grid + print 'flood the grid' + src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ + dmax=dmax, cdepth=cdepth, kk=kk) + else: + src_varz = src_var + + # horizontal interpolation using scrip weights + print 'horizontal interpolation using scrip weights' + dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) + + if ndim == 3: + # vertical interpolation from standard z level to sigma + print 'vertical interpolation from standard z level to sigma' + dst_var= pyroms.remapping.z2roms(dst_varz[::-1, :, :], \ + dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ + flood=False, irange=(0,Lp), jrange=(0,Mp)) + else: + dst_var = dst_varz + + # write data in destination file + print 'write data in destination file\n' + nc.variables['ocean_time'][0] = time + nc.variables[dst_varname][0] = np.squeeze(dst_var) + + # close file + nc.close() + cdf.close() + + if src_varname == 'ssh': + return dst_varz diff --git a/examples/Arctic_SODA3.3.1/remap_bdry.py b/examples/Arctic_SODA3.3.1/remap_bdry.py new file mode 100644 index 0000000..241c941 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/remap_bdry.py @@ -0,0 +1,241 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +import datetime as dt +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample + ystart=235 +# xrange=src_grd.xrange; yrange=src_grd.yrange + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # create boundary file + print 'Creating boundary file', dst_file + if os.path.exists(dst_file) is True: + os.remove(dst_file) + pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) + + # open boundary file + nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') + + #load var + #load var + cdf = netCDF.Dataset(src_file) + src_var = cdf.variables[src_varname] + + tmp = cdf.variables['time'][:] + if len(tmp) > 1: + print 'error : multiple frames in input file' ; exit() + else: + time = tmp[0] + + # we need to correct the time axis + ref_soda = dt.datetime(1980,1,1,0,0) + ref_roms = dt.datetime(1900,1,1,0,0) + ndays = (ref_soda - ref_roms).days + time = time + ndays + + #get missing value + spval = 
src_var.missing_value + + # determine variable dimension + ndim = len(src_var.dimensions) - 1 + + # ARCTIC4 grid sub-sample + if ndim == 3: +# src_var = src_var[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid 3d', src_var.shape + src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[:,np.r_[ystart:np.size(src_var,1),-1],:] + print 'subgrid 3d', src_var.shape + elif ndim == 2: +# src_var = src_var[0,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid 2d', src_var.shape + src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] + print 'subgrid 2d', src_var.shape + + if src_varname == 'ssh': + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'zeta' + dimensions = ('ocean_time', 'eta_rho', 'xi_rho') + long_name = 'free-surface' + dst_varname_north = 'zeta_north' + dimensions_north = ('ocean_time', 'xi_rho') + long_name_north = 'free-surface north boundary condition' + field_north = 'zeta_north, scalar, series' + dst_varname_south = 'zeta_south' + dimensions_south = ('ocean_time', 'xi_rho') + long_name_south = 'free-surface south boundary condition' + field_south = 'zeta_south, scalar, series' + dst_varname_east = 'zeta_east' + dimensions_east = ('ocean_time', 'eta_rho') + long_name_east = 'free-surface east boundary condition' + field_east = 'zeta_east, scalar, series' + dst_varname_west = 'zeta_west' + dimensions_west = ('ocean_time', 'eta_rho') + long_name_west = 'free-surface west boundary condition' + field_west = 'zeta_west, scalar, series' + units = 'meter' + elif src_varname == 'temp': + src_var = src_var + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'temperature' + dst_varname_north = 'temp_north' + dimensions_north = ('ocean_time', 's_rho', 'xi_rho') + long_name_north = 'potential temperature north boundary condition' + field_north = 'temp_north, scalar, series' + dst_varname_south = 'temp_south' + dimensions_south = ('ocean_time', 's_rho', 'xi_rho') + long_name_south = 'potential temperature south boundary condition' + field_south = 'temp_south, scalar, series' + dst_varname_east = 'temp_east' + dimensions_east = ('ocean_time', 's_rho', 'eta_rho') + long_name_east = 'potential temperature east boundary condition' + field_east = 'temp_east, scalar, series' + dst_varname_west = 'temp_west' + dimensions_west = ('ocean_time', 's_rho', 'eta_rho') + long_name_west = 'potential temperature west boundary condition' + field_west = 'temp_west, scalar, series' + units = 'Celsius' + elif src_varname == 'salt': + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_t_to_rho.nc' + dst_varname = 'salinity' + dst_varname_north = 'salt_north' + dimensions_north = ('ocean_time', 's_rho', 'xi_rho') + long_name_north = 'salinity north boundary condition' + field_north = 'salt_north, scalar, series' + dst_varname_south = 'salt_south' + dimensions_south = ('ocean_time', 's_rho', 'xi_rho') + long_name_south = 'salinity south boundary condition' + field_south = 'salt_south, scalar, series' + dst_varname_east = 'salt_east' + dimensions_east = ('ocean_time', 's_rho', 'eta_rho') + long_name_east = 'salinity east boundary 
condition' + field_east = 'salt_east, scalar, series' + dst_varname_west = 'salt_west' + dimensions_west = ('ocean_time', 's_rho', 'eta_rho') + long_name_west = 'salinity west boundary condition' + field_west = 'salt_west, scalar, series' + units = 'PSU' + else: + raise ValueError, 'Undefined src_varname' + + + if ndim == 3: + # build intermediate zgrid + zlevel = -z[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + + # create variable in boudary file + print 'Creating variable', dst_varname_north + nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) + nc.variables[dst_varname_north].long_name = long_name_north + nc.variables[dst_varname_north].units = units + nc.variables[dst_varname_north].field = field_north + nc.variables[dst_varname_north].time = 'ocean_time' + + print 'Creating variable', dst_varname_south + nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) + nc.variables[dst_varname_south].long_name = long_name_south + nc.variables[dst_varname_south].units = units + nc.variables[dst_varname_south].field = field_south + nc.variables[dst_varname_south].time = 'ocean_time' + + print 'Creating variable', dst_varname_east + nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) + nc.variables[dst_varname_east].long_name = long_name_east + nc.variables[dst_varname_east].units = units + nc.variables[dst_varname_east].field = field_east + nc.variables[dst_varname_east].time = 'ocean_time' + + print 'Creating variable', dst_varname_west + nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) + nc.variables[dst_varname_west].long_name = long_name_west + nc.variables[dst_varname_west].units = units + nc.variables[dst_varname_west].field = field_west + nc.variables[dst_varname_west].time = 'ocean_time' + + # remapping + print 'remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name + + if ndim == 3: + # flood the grid + print 'flood the grid', src_var.shape + src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ + dmax=dmax, cdepth=cdepth, kk=kk) + else: + src_varz = src_var + + # horizontal interpolation using scrip weights + print 'horizontal interpolation using scrip weights' + dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) + + if ndim == 3: + # vertical interpolation from standard z level to sigma + print 'vertical interpolation from standard z level to sigma' + dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ + dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ + flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) + dst_var_south = pyroms.remapping.z2roms(dst_varz[::-1, 0:1, :], \ + dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ + flood=False, irange=(0,Lp), jrange=(0,1)) + dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ + dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ + flood=False, irange=(Lp-1,Lp), jrange=(0,Mp)) + dst_var_west = pyroms.remapping.z2roms(dst_varz[::-1, :, 0:1], \ + dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ + flood=False, irange=(0,1), jrange=(0,Mp)) + else: + dst_var_north = dst_varz[-1, :] + dst_var_south = dst_varz[0, :] + dst_var_east = dst_varz[:, -1] + dst_var_west = dst_varz[:, 0] + + # write data in destination file + print 'write data in destination file\n' + nc.variables['ocean_time'][0] = time + 
nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) + nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) + nc.variables[dst_varname_east][0] = np.squeeze(dst_var_east) + nc.variables[dst_varname_west][0] = np.squeeze(dst_var_west) + + # close file + nc.close() + cdf.close() + + if src_varname == 'ssh': + return dst_varz diff --git a/examples/Arctic_SODA3.3.1/remap_bdry_uv.py b/examples/Arctic_SODA3.3.1/remap_bdry_uv.py new file mode 100644 index 0000000..7a73f17 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/remap_bdry_uv.py @@ -0,0 +1,375 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +import datetime as dt +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample +# xrange=src_grd.xrange; yrange=src_grd.yrange + ystart=235 + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # get dimensions + Mp, Lp = dst_grd.hgrid.mask_rho.shape + + # create destination file + print '\nCreating destination file', dst_fileu + if os.path.exists(dst_fileu) is True: + os.remove(dst_fileu) + pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) + print 'Creating destination file', dst_filev + if os.path.exists(dst_filev) is True: + os.remove(dst_filev) + pyroms_toolbox.nc_create_roms_bdry_file(dst_filev, dst_grd, nctime) + + # open destination file + ncu = netCDF.Dataset(dst_fileu, 'a', format='NETCDF3_64BIT') + ncv = netCDF.Dataset(dst_filev, 'a', format='NETCDF3_64BIT') + + #load var + cdfuv = netCDF.Dataset(src_fileuv) + src_varu = cdfuv.variables['u'] + src_varv = cdfuv.variables['v'] + + tmp = cdfuv.variables['time'][:] + if len(tmp) > 1: + print 'error : multiple frames in input file' ; exit() + else: + time = tmp[0] + + # we need to correct the time axis + ref_soda = dt.datetime(1980,1,1,0,0) + ref_roms = dt.datetime(1900,1,1,0,0) + ndays = (ref_soda - ref_roms).days + time = time + ndays + + #get missing value + spval = src_varu.missing_value + + # ARCTIC4 grid sub-sample +# src_varu = src_varu[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_varu = np.squeeze(src_varu, axis=(0,)) + src_varu = src_varu[:,np.r_[ystart:np.size(src_varu,1),-1],:] + print 'subgrid 3d', src_varu.shape +# src_varv = src_varv[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_varv = np.squeeze(src_varv, axis=(0,)) + src_varv = src_varv[:,np.r_[ystart:np.size(src_varv,1),-1],:] + print 'subgrid 3d', src_varv.shape + + # get weights file + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_uv_to_rho.nc' + + # build intermediate zgrid + zlevel = -src_grd.z_t[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + # create variable in destination file + print 'Creating variable u_north' + ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) + ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' + ncu.variables['u_north'].units = 'meter second-1' + ncu.variables['u_north'].field = 'u_north, scalar, series' + ncu.variables['u_north'].time = 'ocean_time' + + print 'Creating variable u_south' + 
ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) + ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' + ncu.variables['u_south'].units = 'meter second-1' + ncu.variables['u_south'].field = 'u_south, scalar, series' + ncu.variables['u_south'].time = 'ocean_time' + + print 'Creating variable u_east' + ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) + ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' + ncu.variables['u_east'].units = 'meter second-1' + ncu.variables['u_east'].field = 'u_east, scalar, series' + ncu.variables['u_east'].time = 'ocean_time' + + print 'Creating variable u_west' + ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) + ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' + ncu.variables['u_west'].units = 'meter second-1' + ncu.variables['u_west'].field = 'u_east, scalar, series' + ncu.variables['u_west'].time = 'ocean_time' + + # create variable in destination file + print 'Creating variable ubar_north' + ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) + ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' + ncu.variables['ubar_north'].units = 'meter second-1' + ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' + ncu.variables['ubar_north'].time = 'ocean_time' + + print 'Creating variable ubar_south' + ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) + ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' + ncu.variables['ubar_south'].units = 'meter second-1' + ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' + ncu.variables['ubar_south'].time = 'ocean_time' + + print 'Creating variable ubar_east' + ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) + ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' + ncu.variables['ubar_east'].units = 'meter second-1' + ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' + ncu.variables['ubar_east'].time = 'ocean_time' + + print 'Creating variable ubar_west' + ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) + ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' + ncu.variables['ubar_west'].units = 'meter second-1' + ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' + ncu.variables['ubar_west'].time = 'ocean_time' + + print 'Creating variable v_north' + ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) + ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' + ncv.variables['v_north'].units = 'meter second-1' + ncv.variables['v_north'].field = 'v_north, scalar, series' + ncv.variables['v_north'].time = 'ocean_time' + + print 'Creating variable v_south' + ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) + ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' + ncv.variables['v_south'].units = 'meter second-1' + ncv.variables['v_south'].field = 'v_south, scalar, series' + ncv.variables['v_south'].time = 'ocean_time' + + print 'Creating variable v_east' + ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) + ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' + 
ncv.variables['v_east'].units = 'meter second-1' + ncv.variables['v_east'].field = 'v_east, scalar, series' + ncv.variables['v_east'].time = 'ocean_time' + + print 'Creating variable v_west' + ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) + ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' + ncv.variables['v_west'].units = 'meter second-1' + ncv.variables['v_west'].field = 'v_east, scalar, series' + ncv.variables['v_west'].time = 'ocean_time' + + print 'Creating variable vbar_north' + ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) + ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' + ncv.variables['vbar_north'].units = 'meter second-1' + ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' + ncv.variables['vbar_north'].time = 'ocean_time' + + print 'Creating variable vbar_south' + ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) + ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' + ncv.variables['vbar_south'].units = 'meter second-1' + ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' + ncv.variables['vbar_south'].time = 'ocean_time' + + print 'Creating variable vbar_east' + ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) + ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' + ncv.variables['vbar_east'].units = 'meter second-1' + ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' + ncv.variables['vbar_east'].time = 'ocean_time' + + print 'Creating variable vbar_west' + ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) + ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' + ncv.variables['vbar_west'].units = 'meter second-1' + ncv.variables['vbar_west'].field = 'vbar_east, scalar, series' + ncv.variables['vbar_west'].time = 'ocean_time' + + + # remaping + print 'remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name + print 'time =', time + + # flood the grid + print 'flood the grid' + src_uz = pyroms_toolbox.BGrid_GFDL.flood(src_varu, src_grd, Bpos='uv', \ + spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) + src_vz = pyroms_toolbox.BGrid_GFDL.flood(src_varv, src_grd, Bpos='uv', \ + spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) + + # horizontal interpolation using scrip weights + print 'horizontal interpolation using scrip weights' + dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ + spval=spval) + dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ + spval=spval) + + # vertical interpolation from standard z level to sigma + print 'vertical interpolation from standard z level to sigma' + dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) + dst_u_south = pyroms.remapping.z2roms(dst_uz[::-1, 0:2, 0:Lp], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(0,Lp), jrange=(0,2)) + dst_u_east = pyroms.remapping.z2roms(dst_uz[::-1, 0:Mp, Lp-2:Lp], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(Lp-2,Lp), jrange=(0,Mp)) + dst_u_west = pyroms.remapping.z2roms(dst_uz[::-1, 0:Mp, 0:2], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(0,2), jrange=(0,Mp)) + + dst_v_north = pyroms.remapping.z2roms(dst_vz[::-1, Mp-2:Mp, 0:Lp], \ + dst_grdz, dst_grd, 
Cpos='rho', spval=spval, \ + flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) + dst_v_south = pyroms.remapping.z2roms(dst_vz[::-1, 0:2, 0:Lp], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(0,Lp), jrange=(0,2)) + dst_v_east = pyroms.remapping.z2roms(dst_vz[::-1, 0:Mp, Lp-2:Lp], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(Lp-2,Lp), jrange=(0,Mp)) + dst_v_west = pyroms.remapping.z2roms(dst_vz[::-1, 0:Mp, 0:2], \ + dst_grdz, dst_grd, Cpos='rho', spval=spval, \ + flood=False, irange=(0,2), jrange=(0,Mp)) + + + # rotate u,v fields + src_angle = np.zeros(dst_grd.hgrid.angle_rho.shape) + dst_angle = dst_grd.hgrid.angle_rho + angle = dst_angle - src_angle + angle = np.tile(angle, (dst_grd.vgrid.N, 1, 1)) + + U_north = dst_u_north + dst_v_north*1j + eitheta_north = np.exp(-1j*angle[:,Mp-2:Mp, 0:Lp]) + U_north = U_north * eitheta_north + dst_u_north = np.real(U_north) + dst_v_north = np.imag(U_north) + + U_south = dst_u_south + dst_v_south*1j + eitheta_south = np.exp(-1j*angle[:,0:2, 0:Lp]) + U_south = U_south * eitheta_south + dst_u_south = np.real(U_south) + dst_v_south = np.imag(U_south) + + U_east = dst_u_east + dst_v_east*1j + eitheta_east = np.exp(-1j*angle[:,0:Mp, Lp-2:Lp]) + U_east = U_east * eitheta_east + dst_u_east = np.real(U_east) + dst_v_east = np.imag(U_east) + + U_west = dst_u_west + dst_v_west*1j + eitheta_west = np.exp(-1j*angle[:,0:Mp, 0:2]) + U_west = U_west * eitheta_west + dst_u_west = np.real(U_west) + dst_v_west = np.imag(U_west) + + # move back to u,v points + dst_u_north = 0.5 * np.squeeze(dst_u_north[:,-1,:-1] + dst_u_north[:,-1,1:]) + dst_v_north = 0.5 * np.squeeze(dst_v_north[:,:-1,:] + dst_v_north[:,1:,:]) + dst_u_south = 0.5 * np.squeeze(dst_u_south[:,0,:-1] + dst_u_south[:,0,1:]) + dst_v_south = 0.5 * np.squeeze(dst_v_south[:,:-1,:] + dst_v_south[:,1:,:]) + dst_u_east = 0.5 * np.squeeze(dst_u_east[:,:,:-1] + dst_u_east[:,:,1:]) + dst_v_east = 0.5 * np.squeeze(dst_v_east[:,:-1,-1] + dst_v_east[:,1:,-1]) + dst_u_west = 0.5 * np.squeeze(dst_u_west[:,:,:-1] + dst_u_west[:,:,1:]) + dst_v_west = 0.5 * np.squeeze(dst_v_west[:,:-1,0] + dst_v_west[:,1:,0]) + + # spval + idxu_north = np.where(dst_grd.hgrid.mask_u[-1,:] == 0) + idxv_north = np.where(dst_grd.hgrid.mask_v[-1,:] == 0) + idxu_south = np.where(dst_grd.hgrid.mask_u[0,:] == 0) + idxv_south = np.where(dst_grd.hgrid.mask_v[0,:] == 0) + idxu_east = np.where(dst_grd.hgrid.mask_u[:,-1] == 0) + idxv_east = np.where(dst_grd.hgrid.mask_v[:,-1] == 0) + idxu_west = np.where(dst_grd.hgrid.mask_u[:,0] == 0) + idxv_west = np.where(dst_grd.hgrid.mask_v[:,0] == 0) + for n in range(dst_grd.vgrid.N): + dst_u_north[n, idxu_north[0]] = spval + dst_v_north[n, idxv_north[0]] = spval + dst_u_south[n, idxu_south[0]] = spval + dst_v_south[n, idxv_south[0]] = spval + dst_u_east[n, idxu_east[0]] = spval + dst_v_east[n, idxv_east[0]] = spval + dst_u_west[n, idxu_west[0]] = spval + dst_v_west[n, idxv_west[0]] = spval + + # compute depth average velocity ubar and vbar + # get z at the right position + z_u_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:-1] + dst_grd.vgrid.z_w[0,:,-1,1:]) + z_v_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:] + dst_grd.vgrid.z_w[0,:,-2,:]) + z_u_south = 0.5 * (dst_grd.vgrid.z_w[0,:,0,:-1] + dst_grd.vgrid.z_w[0,:,0,1:]) + z_v_south = 0.5 * (dst_grd.vgrid.z_w[0,:,0,:] + dst_grd.vgrid.z_w[0,:,1,:]) + z_u_east = 0.5 * (dst_grd.vgrid.z_w[0,:,:,-1] + dst_grd.vgrid.z_w[0,:,:,-2]) + z_v_east = 0.5 * (dst_grd.vgrid.z_w[0,:,:-1,-1] + dst_grd.vgrid.z_w[0,:,1:,-1]) + z_u_west = 0.5 * 
(dst_grd.vgrid.z_w[0,:,:,0] + dst_grd.vgrid.z_w[0,:,:,1]) + z_v_west = 0.5 * (dst_grd.vgrid.z_w[0,:,:-1,0] + dst_grd.vgrid.z_w[0,:,1:,0]) + + dst_ubar_north = np.zeros(dst_u_north.shape[1]) + dst_ubar_south = np.zeros(dst_u_south.shape[1]) + dst_ubar_east = np.zeros(dst_u_east.shape[1]) + dst_ubar_west = np.zeros(dst_u_west.shape[1]) + dst_vbar_north = np.zeros(dst_v_north.shape[1]) + dst_vbar_south = np.zeros(dst_v_south.shape[1]) + dst_vbar_east = np.zeros(dst_v_east.shape[1]) + dst_vbar_west = np.zeros(dst_v_west.shape[1]) + + for i in range(dst_u_north.shape[1]): + dst_ubar_north[i] = (dst_u_north[:,i] * np.diff(z_u_north[:,i])).sum() / -z_u_north[0,i] + dst_ubar_south[i] = (dst_u_south[:,i] * np.diff(z_u_south[:,i])).sum() / -z_u_south[0,i] + for i in range(dst_v_north.shape[1]): + dst_vbar_north[i] = (dst_v_north[:,i] * np.diff(z_v_north[:,i])).sum() / -z_v_north[0,i] + dst_vbar_south[i] = (dst_v_south[:,i] * np.diff(z_v_south[:,i])).sum() / -z_v_south[0,i] + for j in range(dst_u_east.shape[1]): + dst_ubar_east[j] = (dst_u_east[:,j] * np.diff(z_u_east[:,j])).sum() / -z_u_east[0,j] + dst_ubar_west[j] = (dst_u_west[:,j] * np.diff(z_u_west[:,j])).sum() / -z_u_west[0,j] + for j in range(dst_v_east.shape[1]): + dst_vbar_east[j] = (dst_v_east[:,j] * np.diff(z_v_east[:,j])).sum() / -z_v_east[0,j] + dst_vbar_west[j] = (dst_v_west[:,j] * np.diff(z_v_west[:,j])).sum() / -z_v_west[0,j] + + #mask + dst_ubar_north = np.ma.masked_where(dst_grd.hgrid.mask_u[-1,:] == 0, dst_ubar_north) + dst_ubar_south = np.ma.masked_where(dst_grd.hgrid.mask_u[0,:] == 0, dst_ubar_south) + dst_ubar_east = np.ma.masked_where(dst_grd.hgrid.mask_u[:,-1] == 0, dst_ubar_east) + dst_ubar_west = np.ma.masked_where(dst_grd.hgrid.mask_u[:,0] == 0, dst_ubar_west) + dst_vbar_north = np.ma.masked_where(dst_grd.hgrid.mask_v[-1,:] == 0, dst_vbar_north) + dst_vbar_south = np.ma.masked_where(dst_grd.hgrid.mask_v[0,:] == 0, dst_vbar_south) + dst_vbar_east = np.ma.masked_where(dst_grd.hgrid.mask_v[:,-1] == 0, dst_vbar_east) + dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) + + # write data in destination file + print 'write data in destination file' + ncu.variables['ocean_time'][0] = time + ncu.variables['u_north'][0] = dst_u_north + ncu.variables['u_south'][0] = dst_u_south + ncu.variables['u_east'][0] = dst_u_east + ncu.variables['u_west'][0] = dst_u_west + ncu.variables['ubar_north'][0] = dst_ubar_north + ncu.variables['ubar_south'][0] = dst_ubar_south + ncu.variables['ubar_east'][0] = dst_ubar_east + ncu.variables['ubar_west'][0] = dst_ubar_west + + ncv.variables['ocean_time'][0] = time + ncv.variables['v_north'][0] = dst_v_north + ncv.variables['v_south'][0] = dst_v_south + ncv.variables['v_east'][0] = dst_v_east + ncv.variables['v_west'][0] = dst_v_west + ncv.variables['vbar_north'][0] = dst_vbar_north + ncv.variables['vbar_south'][0] = dst_vbar_south + ncv.variables['vbar_east'][0] = dst_vbar_east + ncv.variables['vbar_west'][0] = dst_vbar_west + + # close file + ncu.close() + ncv.close() + cdfuv.close() diff --git a/examples/Arctic_SODA3.3.1/remap_uv.py b/examples/Arctic_SODA3.3.1/remap_uv.py new file mode 100644 index 0000000..e7ad132 --- /dev/null +++ b/examples/Arctic_SODA3.3.1/remap_uv.py @@ -0,0 +1,206 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +import datetime as dt +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class 
nctime(object): + pass + +def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample +# xrange=src_grd.xrange; yrange=src_grd.yrange + ystart=235 + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # get dimensions + Mp, Lp = dst_grd.hgrid.mask_rho.shape + + # create destination file + print '\nCreating destination file', dst_fileu + if os.path.exists(dst_fileu) is True: + os.remove(dst_fileu) + pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) + print 'Creating destination file', dst_filev + if os.path.exists(dst_filev) is True: + os.remove(dst_filev) + pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) + + # open destination file + ncu = netCDF.Dataset(dst_fileu, 'a', format='NETCDF3_64BIT') + ncv = netCDF.Dataset(dst_filev, 'a', format='NETCDF3_64BIT') + + #load var + cdfuv = netCDF.Dataset(src_fileuv) + src_varu = cdfuv.variables['u'] + src_varv = cdfuv.variables['v'] + + tmp = cdfuv.variables['time'][:] + if len(tmp) > 1: + print 'error : multiple frames in input file' ; exit() + else: + time = tmp[0] + + # we need to correct the time axis + ref_soda = dt.datetime(1980,1,1,0,0) + ref_roms = dt.datetime(1900,1,1,0,0) + ndays = (ref_soda - ref_roms).days + time = time + ndays + + #get missing value + spval = src_varu.missing_value + + # ARCTIC4 grid sub-sample +# src_varu = src_varu[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid u', src_varu.shape + src_varu = np.squeeze(src_varu, axis=(0,)) + src_varu = src_varu[:,np.r_[ystart:np.size(src_varu,1),-1],:] + print 'subgrid u', src_varu.shape +# src_varv = src_varv[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + print 'subgrid v', src_varv.shape + src_varv = np.squeeze(src_varv, axis=(0,)) + src_varv = src_varv[:,np.r_[ystart:np.size(src_varv,1),-1],:] + print 'subgrid v', src_varv.shape + + # get weights file + wts_file = 'remap_weights_' + src_grd.name + '_to_' + dst_grd.name + '_bilinear_uv_to_rho.nc' + + # build intermediate zgrid + zlevel = -src_grd.z_t[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + # create variable in destination file + print 'Creating variable u' + ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) + ncu.variables['u'].long_name = '3D u-momentum component' + ncu.variables['u'].units = 'meter second-1' + ncu.variables['u'].field = 'u, scalar, series' + ncu.variables['u'].time = 'ocean_time' + + # create variable in destination file + print 'Creating variable ubar' + ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) + ncu.variables['ubar'].long_name = '2D u-momentum component' + ncu.variables['ubar'].units = 'meter second-1' + ncu.variables['ubar'].field = 'ubar, scalar, series' + ncu.variables['ubar'].time = 'ocean_time' + + print 'Creating variable v' + ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) + ncv.variables['v'].long_name = '3D v-momentum component' + ncv.variables['v'].units = 'meter second-1' + ncv.variables['v'].field = 'v, scalar, series' + ncv.variables['v'].time = 'ocean_time' + + print 'Creating variable vbar' + ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) + ncv.variables['vbar'].long_name = '2D v-momentum component' + 
ncv.variables['vbar'].units = 'meter second-1' + ncv.variables['vbar'].field = 'vbar, scalar, series' + ncv.variables['vbar'].time = 'ocean_time' + + + # remaping + print 'remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name + + # flood the grid + print 'flood the grid' + src_uz = pyroms_toolbox.BGrid_GFDL.flood(src_varu, src_grd, Bpos='uv', \ + spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) + src_vz = pyroms_toolbox.BGrid_GFDL.flood(src_varv, src_grd, Bpos='uv', \ + spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) + + # horizontal interpolation using scrip weights + print 'horizontal interpolation using scrip weights' + dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ + spval=spval) + dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ + spval=spval) + + # vertical interpolation from standard z level to sigma + print 'vertical interpolation from standard z level to sigma' + dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ + dst_grd, Cpos='rho', spval=spval, flood=False) + dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ + dst_grd, Cpos='rho', spval=spval, flood=False) +# dst_u = pyroms.remapping.z2roms(dst_uz[::-1, :, :], \ +# dst_grdz, dst_grd, Cpos='rho', spval=spval, \ +# flood=False, irange=(0,Lp), jrange=(0,Mp)) + +# dst_v = pyroms.remapping.z2roms(dst_vz[::-1, :, :], \ +# dst_grdz, dst_grd, Cpos='rho', spval=spval, \ +# flood=False, irange=(0,Lp), jrange=(0,Mp)) + + # rotate u,v fields + src_angle = np.zeros(dst_grd.hgrid.angle_rho.shape) + dst_angle = dst_grd.hgrid.angle_rho + angle = dst_angle - src_angle + angle = np.tile(angle, (dst_grd.vgrid.N, 1, 1)) + + U = dst_u + dst_v*1j + eitheta = np.exp(-1j*angle[:,:,:]) + U = U * eitheta + dst_u = np.real(U) + dst_v = np.imag(U) + + # move back to u,v points + dst_u = 0.5 * np.squeeze(dst_u[:,:,:-1] + dst_u[:,:,1:]) + dst_v = 0.5 * np.squeeze(dst_v[:,:-1,:] + dst_v[:,1:,:]) + + # spval + idxu = np.where(dst_grd.hgrid.mask_u == 0) + idxv = np.where(dst_grd.hgrid.mask_v == 0) + for n in range(dst_grd.vgrid.N): + dst_u[n, idxu[0], idxu[1]] = spval + dst_v[n, idxv[0], idxv[1]] = spval + + # compute depth average velocity ubar and vbar + # get z at the right position + z_u = 0.5 * (dst_grd.vgrid.z_w[0,:,:,:-1] + dst_grd.vgrid.z_w[0,:,:,1:]) + z_v = 0.5 * (dst_grd.vgrid.z_w[0,:,:-1,:] + dst_grd.vgrid.z_w[0,:,1:,:]) + + print 'shapes', dst_u.shape, dst_v.shape + dst_ubar = np.zeros([dst_u.shape[1], dst_u.shape[2]]) + dst_vbar = np.zeros([dst_v.shape[1], dst_v.shape[2]]) + + for i in range(dst_ubar.shape[1]): + for j in range(dst_ubar.shape[0]): + dst_ubar[j,i] = (dst_u[:,j,i] * np.diff(z_u[:,j,i])).sum() / -z_u[0,j,i] + for i in range(dst_vbar.shape[1]): + for j in range(dst_vbar.shape[0]): + dst_vbar[j,i] = (dst_v[:,j,i] * np.diff(z_v[:,j,i])).sum() / -z_v[0,j,i] + + #mask + dst_ubar = np.ma.masked_where(dst_grd.hgrid.mask_u == 0, dst_ubar) + dst_vbar = np.ma.masked_where(dst_grd.hgrid.mask_v == 0, dst_vbar) + + # write data in destination file + print 'write data in destination file' + ncu.variables['ocean_time'][0] = time + ncu.variables['u'][0] = dst_u + ncu.variables['ubar'][0] = dst_ubar + + ncv.variables['ocean_time'][0] = time + ncv.variables['v'][0] = dst_v + ncv.variables['vbar'][0] = dst_vbar + + # close file + ncu.close() + ncv.close() + cdfuv.close() diff --git a/examples/Arctic_runoff/addto_runoff_file.py b/examples/Arctic_runoff/addto_runoff_file.py index d550df4..79610ae 100644 --- a/examples/Arctic_runoff/addto_runoff_file.py +++ b/examples/Arctic_runoff/addto_runoff_file.py @@ 
-14,7 +14,7 @@ # load 2-dimentional interannual discharge data # from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.iaf.10FEB2011.nc', 'r') data = nc_data.variables['runoff'][:] # time with leap year @@ -112,7 +112,7 @@ nct=0 for t in range(374,376): #for t in range(nt): - print 'Remapping runoff for time %f' %time[t] + print('Remapping runoff for time %f' %time[t]) # conservative horizontal interpolation using scrip runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ spval=spval) @@ -125,8 +125,8 @@ runoff_spread = np.zeros((Mp,Lp)) idx = np.where(runoff != 0) for p in range(np.size(idx,1)): - j = range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1)) - i = range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1)) + j = list(range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1))) + i = list(range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1))) ji = np.meshgrid(j,i) sidx = np.where(maskl[ji] == 1) nbpt = np.size(sidx) / 2 diff --git a/examples/Arctic_runoff/compute_daitren_remap_weights.py b/examples/Arctic_runoff/compute_daitren_remap_weights.py index aaf799e..ad252b5 100644 --- a/examples/Arctic_runoff/compute_daitren_remap_weights.py +++ b/examples/Arctic_runoff/compute_daitren_remap_weights.py @@ -8,7 +8,7 @@ ## load 2-dimentional interannual discharge data ## from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.iaf.10FEB2011.nc', 'r') runoff = nc_data.variables['runoff'][:] lon = nc_data.variables['xc'][:] @@ -18,7 +18,7 @@ mask = nc_data.variables['mask'][:] ## create data remap file for scrip -print 'Create remap grid file for Dai and Trenberth runoff' +print('Create remap grid file for Dai and Trenberth runoff') remap_filename = 'remap_grid_daitren.nc' nc = netCDF.Dataset(remap_filename, 'w', format='NETCDF3_CLASSIC') nc.Description = 'remap grid file for Dai and Trenberth runoff data' @@ -84,14 +84,14 @@ ## create Arctic2 remap file for scrip -print 'Create remap grid file for Arctic2 grid' +print('Create remap grid file for Arctic2 grid') dstgrd = pyroms.grid.get_ROMS_grid('ARCTIC2') dstgrd.hgrid.mask_rho = np.ones(dstgrd.hgrid.mask_rho.shape) pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') ## compute remap weights -print 'compute remap weights using scrip' +print('compute remap weights using scrip') # input namelist variables for conservative remapping at rho points grid1_file = 'remap_grid_daitren.nc' grid2_file = 'remap_grid_ARCTIC2_rho.nc' diff --git a/examples/Arctic_runoff/make_runoff_clim.py b/examples/Arctic_runoff/make_runoff_clim.py index a28752a..20d8ff5 100644 --- a/examples/Arctic_runoff/make_runoff_clim.py +++ b/examples/Arctic_runoff/make_runoff_clim.py @@ -8,7 +8,7 @@ # load 2-dimentional interannual discharge data # from 1948-2007. See Dai and Trenberth (2002) and Dai et al. 
(2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.clim.10FEB2011.nc', 'r') data = nc_data.variables['runoff'][:] @@ -92,7 +92,7 @@ nct=0 for t in range(nt): - print 'Remapping runoff for time %f' %time[nct] + print('Remapping runoff for time %f' %time[nct]) # conservative horizontal interpolation using scrip runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ spval=spval) @@ -105,8 +105,8 @@ runoff_spread = np.zeros((Mp,Lp)) idx = np.where(runoff != 0) for p in range(np.size(idx,1)): - j = range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1)) - i = range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1)) + j = list(range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1))) + i = list(range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1))) ji = np.meshgrid(j,i) sidx = np.where(maskl[ji] == 1) nbpt = np.size(sidx) / 2 diff --git a/examples/Arctic_runoff/make_runoff_file.py b/examples/Arctic_runoff/make_runoff_file.py index 69cf30f..7fd52fd 100644 --- a/examples/Arctic_runoff/make_runoff_file.py +++ b/examples/Arctic_runoff/make_runoff_file.py @@ -14,7 +14,7 @@ # load 2-dimentional interannual discharge data # from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.iaf.10FEB2011.nc', 'r') data = nc_data.variables['runoff'][:] # time with leap year @@ -111,7 +111,7 @@ nct=0 for t in range(nt): - print 'Remapping runoff for time %f' %time[nct] + print('Remapping runoff for time %f' %time[nct]) # conservative horizontal interpolation using scrip runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ spval=spval) @@ -124,8 +124,8 @@ runoff_spread = np.zeros((Mp,Lp)) idx = np.where(runoff != 0) for p in range(np.size(idx,1)): - j = range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1)) - i = range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1)) + j = list(range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1))) + i = list(range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1))) ji = np.meshgrid(j,i) sidx = np.where(maskl[ji] == 1) nbpt = np.size(sidx) / 2 diff --git a/examples/BC_from_sta/make_bry_file.py b/examples/BC_from_sta/make_bry_file.py index 74d7148..b6bfb73 100644 --- a/examples/BC_from_sta/make_bry_file.py +++ b/examples/BC_from_sta/make_bry_file.py @@ -8,14 +8,14 @@ import pyroms from station_bound import * -import commands +import subprocess import pdb irange = None jrange = None def do_file(file): - print 'file is: ' + file + print('file is: ' + file) var_list = ['u', 'v', 'temp', 'salt', 'zeta'] # pdb.set_trace() dst_var = station_bound(var_list, file,\ @@ -23,7 +23,7 @@ def do_file(file): # Change src_filename to your directory for the file's containing variable data data_dir = '/archive/u1/uaf/kate/NGOA/run05/' -lst = commands.getoutput('ls ' + data_dir + 'nwgoa_sta.nc') +lst = subprocess.getoutput('ls ' + data_dir + 'nwgoa_sta.nc') lst_file = lst.split() src_grd = pyroms.sta_grid.get_Stations_grid('NWGOA3', lst_file[0]) diff --git a/examples/BC_from_sta/station_bound.py b/examples/BC_from_sta/station_bound.py index 7ce754c..2464510 100644 --- a/examples/BC_from_sta/station_bound.py +++ b/examples/BC_from_sta/station_bound.py @@ -78,14 +78,14 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ varname = [varname] nvar = len(varname) 
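The runoff scripts above spread each remapped runoff point over a small window of neighbouring ocean cells; the patch wraps range() in list() because Python 3 ranges are lazy sequences and the window is reused for np.meshgrid and mask lookups. A minimal sketch of that spreading-window pattern, with made-up sizes (Mp, Lp, rspread, maskl and the point jc, ic are stand-ins, not values from the repository):

    import numpy as np

    # stand-in sizes and mask; the real scripts take these from the ROMS grid
    Mp, Lp, rspread = 6, 8, 1
    maskl = np.ones((Mp, Lp))                 # 1 = ocean, 0 = land
    jc, ic = 3, 4                             # one runoff point (eta, xi)

    # Python 3: range() is lazy, so materialize the window as lists
    j = list(range(max(0, jc - rspread), min(Mp - 1, jc + rspread + 1)))
    i = list(range(max(0, ic - rspread), min(Lp - 1, ic + rspread + 1)))
    ji = np.meshgrid(j, i)

    # ocean cells inside the window (tuple() keeps the fancy indexing explicit)
    sidx = np.where(maskl[tuple(ji)] == 1)
    nbpt = np.size(sidx) // 2                 # number of neighbouring wet points
    print(nbpt, 'wet points around', (jc, ic))

The integer division is a choice made for this sketch only; the scripts themselves keep the plain / division.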
else: - raise ValueError, 'varname must be a str or a list of str' + raise ValueError('varname must be a str or a list of str') # if we're working on u and v, we'll compute ubar,vbar afterwards compute_ubar = False if (varname.__contains__('u') == 1 and varname.__contains__('v') == 1) or \ (varname.__contains__('u_eastward') == 1 and varname.__contains__('v_northward') == 1): compute_ubar = True - print 'ubar/vbar to be computed from u/v' + print('ubar/vbar to be computed from u/v') if varname.__contains__('ubar'): varname.remove('ubar') nvar = nvar-1 @@ -96,8 +96,8 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # if rotate_uv=True, check that u and v are in varname if rotate_uv is True: if varname.__contains__(uvar) == 0 or varname.__contains__(vvar) == 0: - raise Warning, 'varname must include uvar and vvar in order to' \ - + ' rotate the velocity field' + raise Warning('varname must include uvar and vvar in order to' \ + + ' rotate the velocity field') else: varname.remove(uvar) varname.remove(vvar) @@ -110,7 +110,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ srcfile = sorted(glob.glob(srcfile)) nfile = len(srcfile) else: - raise ValueError, 'src_srcfile must be a str or a list of str' + raise ValueError('src_srcfile must be a str or a list of str') sides = ['_west','_east','_north','_south'] long = {'_west':'Western', '_east':'Eastern', \ @@ -121,7 +121,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nctidx = 0 # loop over the srcfile for nf in range(nfile): - print 'Working with file', srcfile[nf], '...' + print('Working with file', srcfile[nf], '...') # get time ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) @@ -129,14 +129,14 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # trange argument if trange is None: - trange = range(ntime) + trange = list(range(ntime)) # create destination file if nctidx == 0: dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' \ + dst_grd.name + '_bdry.nc' if os.path.exists(dstfile) is False: - print 'Creating destination file', dstfile + print('Creating destination file', dstfile) pyroms_toolbox.nc_create_roms_file(dstfile, dst_grd, \ ocean_time, lgrid=False) @@ -150,10 +150,10 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # loop over variable for nv in range(nvar): - print ' ' - print 'extracting', varname[nv], 'from', srcgrd.name, \ - 'to', dst_grd.name - print 'time =', ocean_time[nt] + print(' ') + print('extracting', varname[nv], 'from', srcgrd.name, \ + 'to', dst_grd.name) + print('time =', ocean_time[nt]) Mp, Lp = dst_grd.hgrid.mask_rho.shape if varname[nv] == uvar: Lp = Lp-1 @@ -170,7 +170,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ try: spval = src_var._FillValue except: - print Warning, 'Did not find a _FillValue attribute.' 
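station_bound() reads the variable's _FillValue inside a bare try/except and merely warns when the attribute is missing. A small sketch of an equivalent lookup with an explicit fallback (the helper name and the 1e37 default are assumptions for illustration, not values taken from pyroms):

    def get_fill_value(var, default=1.0e37):
        # Return var._FillValue if the attribute exists, otherwise a default.
        # netCDF4 variables expose their netCDF attributes as Python attributes.
        return getattr(var, '_FillValue', default)

    class FakeVar:            # stand-in for a netCDF4 variable with no _FillValue
        pass

    spval = get_fill_value(FakeVar())
    print('using fill value', spval)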
+ print(Warning, 'Did not find a _FillValue attribute.') # srange if srange is None: @@ -181,14 +181,14 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # determine where on the C-grid these variable lies Cpos='rho' - print 'Arakawa C-grid position is', Cpos + print('Arakawa C-grid position is', Cpos) # create variable in _destination file if nctidx == 0: for sid in sides: varn = varname[nv]+str(sid) dimens = [i for i in src_var.dimensions] - print 'dimens', dimens, len(dimens) + print('dimens', dimens, len(dimens)) if len(dimens) == 3: dimens = ['ocean_time', 's_rho', \ dimincl[sid]] @@ -200,15 +200,15 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ if varname[nv] == vvar: foo = dimens[-1].replace('rho', 'v') dimens[-1] = foo - print 'Creating variable', varn, dimens + print('Creating variable', varn, dimens) nc.createVariable(varn, 'f8', dimens, \ fill_value=spval) nc.variables[varn].long_name = varname[nv] + \ - ' ' + long[sid] + ' boundary condition' + ' ' + long[sid] + ' boundary condition' try: nc.variables[varn].units = src_var.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_var.time nc.variables[varn].coordinates = \ str(dimens.reverse()) @@ -216,7 +216,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ if ndim == 2: # vertical interpolation from sigma to standard z level - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_varz = pyroms.remapping.sta2z( \ src_var[nt,:,ssrange[0]:ssrange[1]], \ srcgrd, srcgrdz, Cpos=Cpos, spval=spval, \ @@ -273,7 +273,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ dst_varz[44:79, 0] = src_varz[0:35] dst_varz[0, 56:87] = src_varz[35:] - print datetime.datetime.now() + print(datetime.datetime.now()) # horizontal placement of stations into target grid. if ndim == 2: @@ -313,7 +313,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # print datetime.datetime.now() # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = varname[nv]+str(sid) nc.variables[varn][nctidx] = np.squeeze(dst_var_west) @@ -332,9 +332,9 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # rotate the velocity field if requested if rotate_uv is True: - print ' ' - print 'remapping and rotating u and v from', srcgrd.name, \ - 'to', dst_grd.name + print(' ') + print('remapping and rotating u and v from', srcgrd.name, \ + 'to', dst_grd.name) # get source data src_u = pyroms.utility.get_nc_var(uvar, srcfile[nf]) @@ -344,16 +344,16 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ try: spval = src_v._FillValue except: - print Warning, 'Did not find a _FillValue attribute.' 
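The rotate_uv branch that begins here, like the remap_uv scripts earlier in this patch, rotates eastward/northward velocities into grid-relative components by treating (u, v) as a complex number and multiplying by exp(-i*angle); the result is then averaged back onto the u- and v-points. A self-contained sketch of that step with synthetic arrays (in the scripts the angle comes from dst_grd.hgrid.angle_rho, tiled over the vertical levels):

    import numpy as np

    # synthetic fields: one vertical level on a 3 x 4 set of rho points
    u_east = np.full((1, 3, 4), 0.5)                  # eastward component (m/s)
    v_north = np.full((1, 3, 4), 0.2)                 # northward component (m/s)
    angle = np.full((1, 3, 4), np.deg2rad(30.0))      # grid rotation angle (rad)

    # rotate into grid-relative (xi, eta) components
    U = (u_east + 1j * v_north) * np.exp(-1j * angle)
    u_xi, v_eta = np.real(U), np.imag(U)

    # average from rho points back to u- and v-points, as the scripts do next
    u_at_u = 0.5 * (u_xi[:, :, :-1] + u_xi[:, :, 1:])
    v_at_v = 0.5 * (v_eta[:, :-1, :] + v_eta[:, 1:, :])
    print(u_at_u.shape, v_at_v.shape)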
+ print(Warning, 'Did not find a _FillValue attribute.') if rotate_part: ndim = len(src_u.dimensions)-1 ind = uvar.find('_eastward') uvar_out = uvar[0:ind] - print "Warning: renaming uvar to", uvar_out + print("Warning: renaming uvar to", uvar_out) ind = vvar.find('_northward') vvar_out = vvar[0:ind] - print "Warning: renaming vvar to", vvar_out + print("Warning: renaming vvar to", vvar_out) if ndim == 3: dimens_u = ['ocean_time', 's_rho', 'eta_u', 'xi_u'] dimens_v = ['ocean_time', 's_rho', 'eta_v', 'xi_v'] @@ -369,10 +369,10 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # create variable in destination file if nctidx == 0: - print 'Creating boundary variables for '+uvar + print('Creating boundary variables for '+uvar) for sid in sides: varn = uvar_out+str(sid) - print 'Creating variable', varn + print('Creating variable', varn) dimens = list(dimens_u) # for dim in dimens: # if re.match(dimexcl[sid],dim): @@ -380,19 +380,19 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.createVariable(varn, 'f8', dimens, \ fill_value=spval) nc.variables[varn].long_name = uvar_out + \ - ' ' + long[sid] + ' boundary condition' + ' ' + long[sid] + ' boundary condition' try: nc.variables[varn].units = src_u.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_u.time nc.variables[varn].coordinates = \ str(dimens.reverse()) nc.variables[varn].field = src_u.field - print 'Creating boundary variables for '+vvar + print('Creating boundary variables for '+vvar) for sid in sides: varn = vvar_out+str(sid) - print 'Creating variable', varn + print('Creating variable', varn) dimens = list(dimens_v) for dim in dimens: if re.match(dimexcl[sid],dim): @@ -400,11 +400,11 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.createVariable(varn, 'f8', dimens, \ fill_value=spval) nc.variables[varn].long_name = vvar_out + \ - ' ' + long[sid] + ' boundary condition' + ' ' + long[sid] + ' boundary condition' try: nc.variables[varn].units = src_v.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_v.time nc.variables[varn].coordinates = \ str(dimens.reverse()) @@ -433,7 +433,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ ndim = len(src_v.dimensions)-1 if ndim == 3: - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_uz = pyroms.remapping.sta2z( \ src_u[nt,:,ssrange[0]:ssrange[1]], \ srcgrd, srcgrdz, Cpos=Cpos_u, spval=spval, \ @@ -464,7 +464,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[:, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -610,7 +610,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ dst_v_west[idxv_west[0]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = uvar_out+str(sid) nc.variables[varn][nctidx] = dst_u_west @@ -637,7 +637,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ if compute_ubar: if nctidx == 0: - print 'Creating variable ubar_north' + print('Creating variable ubar_north') nc.createVariable('ubar_north', 'f8', \ ('ocean_time', 'xi_u'), 
fill_value=spval) nc.variables['ubar_north'].long_name = \ @@ -646,7 +646,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['ubar_north'].time = 'ocean_time' nc.variables['ubar_north'].coordinates = 'xi_u ocean_time' nc.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') nc.createVariable('vbar_north', 'f8', \ ('ocean_time', 'xi_v'), fill_value=spval) nc.variables['vbar_north'].long_name = \ @@ -656,7 +656,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['vbar_north'].coordinates = 'xi_v ocean_time' nc.variables['vbar_north'].field = 'vbar_north,, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') nc.createVariable('ubar_south', 'f8', \ ('ocean_time', 'xi_u'), fill_value=spval) nc.variables['ubar_south'].long_name = \ @@ -665,7 +665,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['ubar_south'].time = 'ocean_time' nc.variables['ubar_south'].coordinates = 'xi_u ocean_time' nc.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') nc.createVariable('vbar_south', 'f8', \ ('ocean_time', 'xi_v'), fill_value=spval) nc.variables['vbar_south'].long_name = \ @@ -674,7 +674,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['vbar_south'].time = 'ocean_time' nc.variables['vbar_south'].coordinates = 'xi_v ocean_time' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') nc.createVariable('ubar_west', 'f8', \ ('ocean_time', 'eta_u'), fill_value=spval) nc.variables['ubar_west'].long_name = \ @@ -683,7 +683,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['ubar_west'].time = 'ocean_time' nc.variables['ubar_west'].coordinates = 'eta_u ocean_time' nc.variables['ubar_west'].field = 'ubar_west, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') nc.createVariable('vbar_west', 'f8', \ ('ocean_time', 'eta_v'), fill_value=spval) nc.variables['vbar_west'].long_name = \ @@ -692,7 +692,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['vbar_west'].time = 'ocean_time' nc.variables['vbar_west'].coordinates = 'eta_v ocean_time' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') nc.createVariable('ubar_east', 'f8', \ ('ocean_time', 'eta_u'), fill_value=spval) nc.variables['ubar_east'].long_name = \ @@ -701,7 +701,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['ubar_east'].time = 'ocean_time' nc.variables['ubar_east'].coordinates = 'eta_u ocean_time' nc.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') nc.createVariable('vbar_east', 'f8', \ ('ocean_time', 'eta_v'), fill_value=spval) nc.variables['vbar_east'].long_name = \ @@ -712,7 +712,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ # compute depth average velocity ubar and vbar # get z at the right position - print 'Computing ubar/vbar from u/v' + print('Computing ubar/vbar from u/v') z_u_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:-1] + dst_grd.vgrid.z_w[0,:,-1, 1:]) z_v_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:] + @@ -800,7 +800,7 @@ def station_bound(varname, srcfile, srcgrd, dst_grd, \ nc.variables['vbar_west'][nctidx] = dst_vbar_west nctidx = nctidx + 1 - print 'ADDING to nctidx ', nctidx + print('ADDING 
to nctidx ', nctidx) nc.sync() # close files here? how? diff --git a/examples/Beaufort/make_clm_file.py b/examples/Beaufort/make_clm_file.py index eee8219..2d00509 100644 --- a/examples/Beaufort/make_clm_file.py +++ b/examples/Beaufort/make_clm_file.py @@ -9,7 +9,7 @@ import pyroms import pyroms_toolbox -import commands +import subprocess irange=(370,580) jrange=(460,580) diff --git a/examples/Beaufort/make_ice_bdry_file.py b/examples/Beaufort/make_ice_bdry_file.py index 55eb33f..d2a246a 100644 --- a/examples/Beaufort/make_ice_bdry_file.py +++ b/examples/Beaufort/make_ice_bdry_file.py @@ -33,7 +33,7 @@ def do_file(month): src_filename = part_filename + month + '*.nc' lcopy = list(src_varname) - print 'working on file '+src_filename + print('working on file '+src_filename) # didn't work even with processes=1 # pdb.set_trace() dst_var = pyroms_toolbox.remapping_bound(lcopy, src_filename,\ diff --git a/examples/Beaufort/make_weight_files.py b/examples/Beaufort/make_weight_files.py index 19f532c..766314e 100644 --- a/examples/Beaufort/make_weight_files.py +++ b/examples/Beaufort/make_weight_files.py @@ -30,7 +30,7 @@ num_maps = 1 map_method = 'bilinear' - print "Making "+str(interp_file1)+"..." + print("Making "+str(interp_file1)+"...") pyroms.remapping.compute_remap_weights(grid1_file,grid2_file,\ interp_file1,interp_file2,map1_name,\ diff --git a/examples/Bering/make_weight_files.py b/examples/Bering/make_weight_files.py index decb20e..c8db342 100644 --- a/examples/Bering/make_weight_files.py +++ b/examples/Bering/make_weight_files.py @@ -23,7 +23,7 @@ num_maps = 1 map_method = 'bilinear' - print "Making "+str(interp_file1)+"..." + print("Making "+str(interp_file1)+"...") pyroms.remapping.compute_remap_weights(grid1_file,grid2_file,\ interp_file1,interp_file2,map1_name,\ diff --git a/examples/CCS1_SODA3.3.1/Boundary/make_bdry_file.py b/examples/CCS1_SODA3.3.1/Boundary/make_bdry_file.py index 0f4b26c..b984a32 100644 --- a/examples/CCS1_SODA3.3.1/Boundary/make_bdry_file.py +++ b/examples/CCS1_SODA3.3.1/Boundary/make_bdry_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np #increase the maximum number of open files allowed @@ -27,7 +27,7 @@ for filein in filelst: tag=filein.replace('soda3.3.1_5dy_ocean_reg_','').replace('.nc','') - print '\nBuild OBC file for time %s' %filein + print('\nBuild OBC file for time %s' %filein) zeta_dst_file = dst_dir + dst_grd.name + '_bdry_zeta_' + tag + '_' + src_grd.name + '.nc' temp_dst_file = dst_dir + dst_grd.name + '_bdry_temp_' + tag + '_' + src_grd.name + '.nc' salt_dst_file = dst_dir + dst_grd.name + '_bdry_salt_' + tag + '_' + src_grd.name + '.nc' diff --git a/examples/CCS1_SODA3.3.1/Boundary/remap_bdry.py b/examples/CCS1_SODA3.3.1/Boundary/remap_bdry.py index a8b8451..1a8a76d 100644 --- a/examples/CCS1_SODA3.3.1/Boundary/remap_bdry.py +++ b/examples/CCS1_SODA3.3.1/Boundary/remap_bdry.py @@ -26,7 +26,7 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth nctime.units = 'days since 1900-01-01 00:00:00' # create boundary file - print 'Creating boundary file', dst_file + print('Creating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -41,7 +41,7 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth tmp = cdf.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple 
frames in input file') ; exit() else: time = tmp[0] @@ -128,7 +128,7 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -140,21 +140,21 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north #nc.variables[dst_varname_north]._FillValue = spval - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south #nc.variables[dst_varname_south]._FillValue = spval - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -162,24 +162,24 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth #nc.variables[dst_varname_west]._FillValue = spval # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -195,7 +195,7 @@ def remap_bdry(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/CCS1_SODA3.3.1/Boundary/remap_bdry_uv.py b/examples/CCS1_SODA3.3.1/Boundary/remap_bdry_uv.py index 309ed8a..c2516cf 100644 --- a/examples/CCS1_SODA3.3.1/Boundary/remap_bdry_uv.py +++ b/examples/CCS1_SODA3.3.1/Boundary/remap_bdry_uv.py @@ -29,11 +29,11 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd Mp, Lp = dst_grd.hgrid.mask_rho.shape # create destination file - print '\nCreating destination file', dst_fileu + print('\nCreating destination 
file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_bdry_file(dst_filev, dst_grd, nctime) @@ -49,7 +49,7 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd tmp = cdfuv.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple frames in input file') ; exit() else: time = tmp[0] @@ -76,19 +76,19 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' #ncu.variables['u_north']._FillValue = spval - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' #ncu.variables['u_south']._FillValue = spval - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' @@ -96,57 +96,57 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd #ncu.variables['u_west']._FillValue = spval # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' #ncu.variables['ubar_north']._FillValue = spval - print 'Creating variable ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' #ncu.variables['ubar_south']._FillValue = spval - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' #ncu.variables['ubar_west']._FillValue = spval - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary 
condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' #ncv.variables['v_north']._FillValue = spval - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' #ncv.variables['v_south']._FillValue = spval - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' #ncv.variables['v_west']._FillValue = spval - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' #ncv.variables['vbar_north']._FillValue = spval - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' #ncv.variables['vbar_south']._FillValue = spval - print 'Creating variable vbar_west' + print('Creating variable vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -156,25 +156,25 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -278,7 +278,7 @@ def remap_bdry_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cd dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in 
destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/CCS1_SODA3.3.1/Clim/make_clim_file.py b/examples/CCS1_SODA3.3.1/Clim/make_clim_file.py index 0a2157b..cf6e4c0 100644 --- a/examples/CCS1_SODA3.3.1/Clim/make_clim_file.py +++ b/examples/CCS1_SODA3.3.1/Clim/make_clim_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np #increase the maximum number of open files allowed @@ -24,15 +24,15 @@ filelstyear = [] for ff in filelst: - if ff.find(str(my_year)) > 0: - filelstyear.append(ff) + if ff.find(str(my_year)) > 0: + filelstyear.append(ff) src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(400, 500), yrange=(180, 280) ) dst_grd = pyroms.grid.get_ROMS_grid('CCS') for filein in filelstyear: tag=filein.replace('soda3.3.1_monthly_ocean_reg_','').replace('.nc','') - print '\nBuild OBC file for time %s' %filein + print('\nBuild OBC file for time %s' %filein) zeta_dst_file = dst_dir + dst_grd.name + '_clim_zeta_' + tag + '_' + src_grd.name + '.nc' temp_dst_file = dst_dir + dst_grd.name + '_clim_temp_' + tag + '_' + src_grd.name + '.nc' salt_dst_file = dst_dir + dst_grd.name + '_clim_salt_' + tag + '_' + src_grd.name + '.nc' diff --git a/examples/CCS1_SODA3.3.1/Clim/remap.py b/examples/CCS1_SODA3.3.1/Clim/remap.py index 7231c91..59aba44 100644 --- a/examples/CCS1_SODA3.3.1/Clim/remap.py +++ b/examples/CCS1_SODA3.3.1/Clim/remap.py @@ -26,7 +26,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k nctime.units = 'days since 1900-01-01 00:00:00' # create IC file - print '\nCreating initial condition file', dst_file + print('\nCreating initial condition file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -40,7 +40,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k tmp = cdf.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple frames in input file') ; exit() else: time = tmp[0] @@ -98,7 +98,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -110,7 +110,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -119,31 +119,31 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = 
pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/CCS1_SODA3.3.1/Clim/remap_uv.py b/examples/CCS1_SODA3.3.1/Clim/remap_uv.py index 14f7a7c..5819eb6 100644 --- a/examples/CCS1_SODA3.3.1/Clim/remap_uv.py +++ b/examples/CCS1_SODA3.3.1/Clim/remap_uv.py @@ -29,11 +29,11 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= Mp, Lp = dst_grd.hgrid.mask_rho.shape # create destination file - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -49,7 +49,7 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= tmp = cdfuv.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple frames in input file') ; exit() else: time = tmp[0] @@ -76,27 +76,27 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' #ncu.variables['u_north']._FillValue = spval # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' #ncu.variables['ubar_north']._FillValue = spval - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' #ncv.variables['v_north']._FillValue = spval - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -105,25 +105,25 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name + print('remapping and rotating u and v from', src_grd.name, \ + 'to', 
dst_grd.name) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -175,7 +175,7 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/CCS1_SODA3.3.1/Initial/make_ic_file.py b/examples/CCS1_SODA3.3.1/Initial/make_ic_file.py index 06821b2..a15568f 100644 --- a/examples/CCS1_SODA3.3.1/Initial/make_ic_file.py +++ b/examples/CCS1_SODA3.3.1/Initial/make_ic_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms import pyroms_toolbox @@ -10,17 +10,17 @@ from remap_uv import remap_uv #date to remap -data_dir = '/Volumes/P1/Data/SODA/SODA_3.3.1/' +data_dir = '/import/archive/u1/uaf/AKWATERS/kshedstrom/SODA/' dst_dir='./' -tag='1980_01_08' +tag='2010_12_26' filein=data_dir + 'soda3.3.1_5dy_ocean_reg_' + tag + '.nc' # load grids -src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(400, 500), yrange=(180, 280)) -dst_grd = pyroms.grid.get_ROMS_grid('CCS') +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL(data_dir + 'SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(400, 500), yrange=(180, 280)) +dst_grd = pyroms.grid.get_ROMS_grid('CCS_10') -print '\nBuild IC file from %s' %filein +print('\nBuild IC file from %s' %filein) zeta_dst_file = dst_dir + dst_grd.name + '_ic_zeta_' + tag + '_' + src_grd.name + '.nc' temp_dst_file = dst_dir + dst_grd.name + '_ic_temp_' + tag + '_' + src_grd.name + '.nc' @@ -32,7 +32,7 @@ zeta = remap('ssh', filein, src_grd, dst_grd, zeta_dst_file, dst_dir=dst_dir) # reload grid with zeta (more accurate) -dst_grd = pyroms.grid.get_ROMS_grid('CCS', zeta=zeta) +dst_grd = pyroms.grid.get_ROMS_grid('CCS_10', zeta=zeta) # regrid temp, salt and velocities remap('temp',filein, src_grd, dst_grd, temp_dst_file, dst_dir=dst_dir) diff --git a/examples/CCS1_SODA3.3.1/Initial/make_remap_weights_file.py b/examples/CCS1_SODA3.3.1/Initial/make_remap_weights_file.py index 11043ba..df8780b 100644 --- a/examples/CCS1_SODA3.3.1/Initial/make_remap_weights_file.py +++ b/examples/CCS1_SODA3.3.1/Initial/make_remap_weights_file.py @@ -2,8 +2,8 @@ import pyroms_toolbox # load the grid -srcgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/Volumes/P1/Data/SODA/SODA_3.3.1/grid/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(400, 500), yrange=(180, 280)) -dstgrd = 
pyroms.grid.get_ROMS_grid('CCS') +srcgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/import/archive/u1/uaf/AKWATERS/kshedstrom/SODA/SODA3_0.5deg_grid.nc', name='SODA3.3.1', xrange=(400, 500), yrange=(180, 280)) +dstgrd = pyroms.grid.get_ROMS_grid('CCS_10') # make remap grid file for scrip pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(srcgrd, Bpos='t') diff --git a/examples/CCS1_SODA3.3.1/Initial/remap.py b/examples/CCS1_SODA3.3.1/Initial/remap.py index 7231c91..59aba44 100644 --- a/examples/CCS1_SODA3.3.1/Initial/remap.py +++ b/examples/CCS1_SODA3.3.1/Initial/remap.py @@ -26,7 +26,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k nctime.units = 'days since 1900-01-01 00:00:00' # create IC file - print '\nCreating initial condition file', dst_file + print('\nCreating initial condition file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -40,7 +40,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k tmp = cdf.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple frames in input file') ; exit() else: time = tmp[0] @@ -98,7 +98,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -110,7 +110,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -119,31 +119,31 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/CCS1_SODA3.3.1/Initial/remap_uv.py b/examples/CCS1_SODA3.3.1/Initial/remap_uv.py index 14f7a7c..5819eb6 100644 --- a/examples/CCS1_SODA3.3.1/Initial/remap_uv.py +++ b/examples/CCS1_SODA3.3.1/Initial/remap_uv.py @@ -29,11 +29,11 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= Mp, Lp = dst_grd.hgrid.mask_rho.shape # create destination file - print '\nCreating 
destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -49,7 +49,7 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= tmp = cdfuv.variables['time'][:] if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() + print('error : multiple frames in input file') ; exit() else: time = tmp[0] @@ -76,27 +76,27 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' #ncu.variables['u_north']._FillValue = spval # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' #ncu.variables['ubar_north']._FillValue = spval - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' #ncv.variables['v_north']._FillValue = spval - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -105,25 +105,25 @@ def remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -175,7 +175,7 @@ def 
remap_uv(src_fileuv, src_grd, dst_grd, dst_fileu, dst_filev, dmax=0, cdepth= dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/CCS1_SODA3.3.1/make_SODA3_grid.py b/examples/CCS1_SODA3.3.1/make_SODA3_grid.py index d5bc98c..e4ee835 100644 --- a/examples/CCS1_SODA3.3.1/make_SODA3_grid.py +++ b/examples/CCS1_SODA3.3.1/make_SODA3_grid.py @@ -56,26 +56,26 @@ kmt = np.empty((ny,nx)) ht = np.empty((ny,nx)) for ky in np.arange(ny): - for kx in np.arange(nx): - indlist = np.where(mask_t[:,ky,kx] == 1)[0] - if len(indlist) == 0: - kmt[ky,kx] = 0 - ht[ky,kx] = 0 - else: - kmt[ky,kx] = indlist.max() + 1 - ht[ky,kx] = st_edges_ocean[indlist.max() + 1] + for kx in np.arange(nx): + indlist = np.where(mask_t[:,ky,kx] == 1)[0] + if len(indlist) == 0: + kmt[ky,kx] = 0 + ht[ky,kx] = 0 + else: + kmt[ky,kx] = indlist.max() + 1 + ht[ky,kx] = st_edges_ocean[indlist.max() + 1] kmt[np.where(kmt == 0)] = spval ht[np.where(ht == 0)] = spval kmu = np.empty((ny,nx)) for ky in np.arange(ny): - for kx in np.arange(nx): - indlist = np.where(mask_u[:,ky,kx] == 1)[0] - if len(indlist) == 0: - kmu[ky,kx] = 0 - else: - kmu[ky,kx] = indlist.max() + 1 + for kx in np.arange(nx): + indlist = np.where(mask_u[:,ky,kx] == 1)[0] + if len(indlist) == 0: + kmu[ky,kx] = 0 + else: + kmu[ky,kx] = indlist.max() + 1 kmu[np.where(kmu == 0)] = spval diff --git a/examples/MERRA-2/get_MERRA_Pair_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_Pair_from_nasa_opendap_3hours.py index aa66dfa..0cf48c6 100644 --- a/examples/MERRA-2/get_MERRA_Pair_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_Pair_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git 
a/examples/MERRA-2/get_MERRA_Qair_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_Qair_from_nasa_opendap_3hours.py index f9d9abe..f44119d 100644 --- a/examples/MERRA-2/get_MERRA_Qair_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_Qair_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_Tair_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_Tair_from_nasa_opendap_3hours.py index fe68e94..c436d22 100644 --- a/examples/MERRA-2/get_MERRA_Tair_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_Tair_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = 
urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_Uwind_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_Uwind_from_nasa_opendap_3hours.py index 623e4eb..4aebbf2 100644 --- a/examples/MERRA-2/get_MERRA_Uwind_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_Uwind_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_Vwind_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_Vwind_from_nasa_opendap_3hours.py index 5d46c48..bcde044 100644 --- a/examples/MERRA-2/get_MERRA_Vwind_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_Vwind_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and 
tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_albedo_from_nasa_opendap_daily.py b/examples/MERRA-2/get_MERRA_albedo_from_nasa_opendap_daily.py index e9cb33e..cebfccf 100644 --- a/examples/MERRA-2/get_MERRA_albedo_from_nasa_opendap_daily.py +++ b/examples/MERRA-2/get_MERRA_albedo_from_nasa_opendap_daily.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_cloud_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_cloud_from_nasa_opendap_3hours.py index 51c4092..c407e33 100644 --- a/examples/MERRA-2/get_MERRA_cloud_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_cloud_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', 
use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_lwrad_down_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_lwrad_down_from_nasa_opendap_3hours.py index d859190..ce63195 100644 --- a/examples/MERRA-2/get_MERRA_lwrad_down_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_lwrad_down_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_rain_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_rain_from_nasa_opendap_3hours.py index 
49dcb14..4c8ace6 100644 --- a/examples/MERRA-2/get_MERRA_rain_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_rain_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_snow_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_snow_from_nasa_opendap_3hours.py index 6996d5a..cfd9af4 100644 --- a/examples/MERRA-2/get_MERRA_snow_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_snow_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - 
urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/MERRA-2/get_MERRA_swrad_from_nasa_opendap_3hours.py b/examples/MERRA-2/get_MERRA_swrad_from_nasa_opendap_3hours.py index 972edb8..ca2e385 100644 --- a/examples/MERRA-2/get_MERRA_swrad_from_nasa_opendap_3hours.py +++ b/examples/MERRA-2/get_MERRA_swrad_from_nasa_opendap_3hours.py @@ -6,9 +6,9 @@ import pyroms import pyroms_toolbox -import cookielib +import http.cookiejar import netrc -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import pydap.lib from pydap.exceptions import ClientError @@ -25,26 +25,26 @@ def install_basic_client(uri='', user='', passwd='', use_netrc=True): # Create special opener with support for Cookies - cj = cookielib.CookieJar() + cj = http.cookiejar.CookieJar() # Create the password manager and load with the credentials using - pwMgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + pwMgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() # Get passwords from the .netrc file nless use_netrc is False if use_netrc: logins = netrc.netrc() accounts = logins.hosts # a dist of hosts and tuples - for host, info in accounts.iteritems(): + for host, info in accounts.items(): login, account, password = info # log.debug('Host: %s; login: %s; account: %s; password: %s' % (host, login, account, password)) pwMgr.add_password(None, host, login, password) if uri and user and passwd: pwMgr.add_password(None, uri, user, passwd) - opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pwMgr), urllib2.HTTPCookieProcessor(cj)) + opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pwMgr), urllib.request.HTTPCookieProcessor(cj)) opener.addheaders = [('User-agent', pydap.lib.USER_AGENT)] - urllib2.install_opener(opener) + urllib.request.install_opener(opener) def new_request(url): if url[-1] is '&': url = url[0:-1] # log.debug('Opening %s (install_basic_client)' % url) - r = urllib2.urlopen(url) + r = urllib.request.urlopen(url) resp = r.headers.dict resp['status'] = str(r.code) data = r.read() diff --git a/examples/NWGOA3/Fetch_Pacific/get_pacific_3d.py b/examples/NWGOA3/Fetch_Pacific/get_pacific_3d.py index a336406..08e94ff 100644 --- a/examples/NWGOA3/Fetch_Pacific/get_pacific_3d.py +++ b/examples/NWGOA3/Fetch_Pacific/get_pacific_3d.py @@ -21,7 +21,7 @@ def create_HYCOM_file(name): global nc - print 'Creating file %s' %name + print('Creating file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -79,7 +79,7 @@ def create_HYCOM_file(name): nc.variables[outvarname].coordinates = 'ocean_time s_rho lon_rho lat_rho' - print 'Done with header for file %s' %name + print('Done with header for file %s' %name) @@ -118,15 +118,15 @@ def create_HYCOM_file(name): create_HYCOM_file(outfile) day_out = 0 for day in range(rec_start,rec_end): - print 'Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year) + print('Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215,195:265] # spval = var.get_fill_value() # dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' 
+ print('No file on the server... We skip this day.') retry_day.append((day,day_out)) continue @@ -139,17 +139,17 @@ def create_HYCOM_file(name): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for (day,day_out) in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day_out, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day_out, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215,195:265] # spval = var.get_fill_value() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/NWGOA3/Fetch_Pacific/get_pacific_u.py b/examples/NWGOA3/Fetch_Pacific/get_pacific_u.py index e35181b..d969dbe 100644 --- a/examples/NWGOA3/Fetch_Pacific/get_pacific_u.py +++ b/examples/NWGOA3/Fetch_Pacific/get_pacific_u.py @@ -13,7 +13,7 @@ def create_HYCOM_file(name): global nc - print 'Creating file %s' %name + print('Creating file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -72,7 +72,7 @@ def create_HYCOM_file(name): nc.variables[outvarname].coordinates = 'ocean_time s_rho lon_u lat_u' - print 'Done with header for file %s' %name + print('Done with header for file %s' %name) @@ -113,15 +113,15 @@ def create_HYCOM_file(name): create_HYCOM_file(outfile) day_out = 0 for day in range(rec_start,rec_end): - print 'Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year) + print('Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215,195:265-1] # spval = var.get_fill_value() # dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append((day,day_out)) continue @@ -134,17 +134,17 @@ def create_HYCOM_file(name): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for (day,day_out) in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day_out, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day_out, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215,195:265-1] #spval = var.get_fill_value() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/NWGOA3/Fetch_Pacific/get_pacific_v.py b/examples/NWGOA3/Fetch_Pacific/get_pacific_v.py index 0d892c8..55da7d8 100644 --- a/examples/NWGOA3/Fetch_Pacific/get_pacific_v.py +++ b/examples/NWGOA3/Fetch_Pacific/get_pacific_v.py @@ -13,7 +13,7 @@ def create_HYCOM_file(name): global nc - print 'Creating file %s' %name + print('Creating file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -72,7 +72,7 @@ def create_HYCOM_file(name): nc.variables[outvarname].coordinates = 'ocean_time s_rho lon_v lat_v' - print 'Done with header for file %s' %name + print('Done with header for file %s' %name) @@ -113,15 +113,15 @@ def create_HYCOM_file(name): create_HYCOM_file(outfile) day_out = 0 for day in range(rec_start,rec_end): - print 'Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year) + print('Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215-1,195:265] # spval = var.get_fill_value() # dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append((day,day_out)) continue @@ -134,17 +134,17 @@ def create_HYCOM_file(name): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for (day,day_out) in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day_out, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day_out, year)) #get data from server try: var = dataset.variables[invarname][day,:,170:215-1,195:265] # spval = var.get_fill_value() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/NWGOA3/Fetch_Pacific/get_pacific_zeta.py b/examples/NWGOA3/Fetch_Pacific/get_pacific_zeta.py index 4a3536c..34a65fa 100644 --- a/examples/NWGOA3/Fetch_Pacific/get_pacific_zeta.py +++ b/examples/NWGOA3/Fetch_Pacific/get_pacific_zeta.py @@ -13,7 +13,7 @@ def create_HYCOM_file(name): global nc - print 'Creating file %s' %name + print('Creating file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -71,7 +71,7 @@ def create_HYCOM_file(name): nc.variables[outvarname].coordinates = 'ocean_time lon_rho lat_rho' - print 'Done with header for file %s' %name + print('Done with header for file %s' %name) @@ -113,15 +113,15 @@ def create_HYCOM_file(name): create_HYCOM_file(outfile) day_out = 0 for day in range(rec_start,rec_end): - print 'Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year) + print('Processing file for %s, day %d, year %04d' %(invarname, day_out*3, year)) #get data from server try: var = dataset.variables[invarname][day,170:215,195:265] # spval = var.get_fill_value() # dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') retry_day.append((day,day_out)) continue @@ -134,17 +134,17 @@ def create_HYCOM_file(name): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for (day,day_out) in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day_out, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day_out, year)) #get data from server try: var = dataset.variables[invarname][day,170:215,195:265] # spval = var.get_fill_value() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/NWGOA3/Fetch_Pacific/make_months.py b/examples/NWGOA3/Fetch_Pacific/make_months.py index 0b67eae..47d1f7d 100644 --- a/examples/NWGOA3/Fetch_Pacific/make_months.py +++ b/examples/NWGOA3/Fetch_Pacific/make_months.py @@ -4,7 +4,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np from multiprocessing import Pool @@ -19,7 +19,7 @@ dst_dir='./months/' -lst = commands.getoutput('ls ' + data_dir + 'Pacific_*' + year + '.nc') +lst = subprocess.getoutput('ls ' + data_dir + 'Pacific_*' + year + '.nc') lst_file = lst.split() #print 'Make monthly average files from the following file list:' @@ -33,46 +33,46 @@ def do_file(file): # print 'Output filename ', outfile, index command = ('ncra', '-d', 'ocean_time,0,9', file, outfile+'.01') - print 'ncra -d ocean_time,0,9', file, outfile+'.01' + print('ncra -d ocean_time,0,9', file, outfile+'.01') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,10,19', file, outfile+'.02') - print 'ncra -d ocean_time,10,19', file, outfile+'.02' + print('ncra -d ocean_time,10,19', file, outfile+'.02') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,20,29', file, outfile+'.03') - print 'ncra -d ocean_time,20,29', file, outfile+'.03' + print('ncra -d ocean_time,20,29', file, outfile+'.03') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,30,39', file, outfile+'.04') - print 'ncra -d ocean_time,30,39', file, outfile+'.04' + print('ncra -d ocean_time,30,39', file, outfile+'.04') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,40,49', file, outfile+'.05') - print 'ncra -d ocean_time,40,49', file, outfile+'.05' + print('ncra -d ocean_time,40,49', file, outfile+'.05') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,50,59', file, outfile+'.06') - print 'ncra -d ocean_time,50,59', file, outfile+'.06' + print('ncra -d ocean_time,50,59', file, outfile+'.06') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,60,69', file, outfile+'.07') - print 'ncra -d ocean_time,60,69', file, outfile+'.07' + print('ncra -d ocean_time,60,69', file, outfile+'.07') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,70,79', file, outfile+'.08') - print 'ncra -d ocean_time,70,79', file, outfile+'.08' + print('ncra -d ocean_time,70,79', file, outfile+'.08') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,80,89', file, outfile+'.09') - print 'ncra -d ocean_time,80,89', file, outfile+'.09' + print('ncra -d ocean_time,80,89', file, outfile+'.09') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,90,99', file, outfile+'.10') - print 
'ncra -d ocean_time,90,99', file, outfile+'.10' + print('ncra -d ocean_time,90,99', file, outfile+'.10') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,100,109', file, outfile+'.11') - print 'ncra -d ocean_time,100,109', file, outfile+'.11' + print('ncra -d ocean_time,100,109', file, outfile+'.11') # subprocess.check_call(command) command = ('ncra', '-d', 'ocean_time,110,119', file, outfile+'.12') - print 'ncra -d ocean_time,110,119', file, outfile+'.12' + print('ncra -d ocean_time,110,119', file, outfile+'.12') # subprocess.check_call(command) command = ('ncrcat', outfile+'.*', outfile) - print 'ncrcat', outfile+'.*', outfile + print('ncrcat', outfile+'.*', outfile) # subprocess.check_call(command) command = ('rm', outfile+'.*') - print 'rm', outfile+'.*' + print('rm', outfile+'.*') # subprocess.check_call(command) processes = 1 diff --git a/examples/NWGOA3/make_bry_file.py b/examples/NWGOA3/make_bry_file.py index a6e1187..eff1784 100644 --- a/examples/NWGOA3/make_bry_file.py +++ b/examples/NWGOA3/make_bry_file.py @@ -9,17 +9,17 @@ import pyroms import pyroms_toolbox -import commands +import subprocess irange = None jrange = None def do_file(file): - print 'file is: ' + file + print('file is: ' + file) start = string.find(file,'_') end = string.find(file,'_',start+1) var = file[start+1:end] - print 'var is: ' + var + print('var is: ' + var) if var == 'uv': dst_var = pyroms_toolbox.remapping_bound(['u', 'v'], file,\ wts_file,src_grd,dst_grd,rotate_uv=True,\ @@ -34,7 +34,7 @@ def do_file(file): data_dir = '/archive/u1/uaf/kate/COSINE/data/' #lst = commands.getoutput('ls ' + data_dir + '*_1979.nc') #lst = commands.getoutput('ls ' + data_dir + 'Pacific_s?o[n4]_1979.nc') -lst = commands.getoutput('ls ' + data_dir + 'Pacific_[tuvz]*_1979.nc') +lst = subprocess.getoutput('ls ' + data_dir + 'Pacific_[tuvz]*_1979.nc') lst = lst.split() lst_file = lst_file + lst diff --git a/examples/NWGOA3/make_clm_file.py b/examples/NWGOA3/make_clm_file.py index 5aecad1..bcfe5e7 100644 --- a/examples/NWGOA3/make_clm_file.py +++ b/examples/NWGOA3/make_clm_file.py @@ -9,17 +9,17 @@ import pyroms import pyroms_toolbox -import commands +import subprocess irange = None jrange = None def do_file(file): - print 'file is: ' + file + print('file is: ' + file) start = string.find(file,'_') end = string.find(file,'_',start+1) var = file[start+1:end] - print 'var is: ' + var + print('var is: ' + var) if var == 'uv': dst_var = pyroms_toolbox.remapping(['u', 'v'], file,\ wts_file,src_grd,dst_grd,rotate_uv=True,\ @@ -32,7 +32,7 @@ def do_file(file): # Change src_filename to your directory for the file's containing variable data data_dir = '/archive/u1/uaf/kate/COSINE/months/' -lst = commands.getoutput('ls ' + data_dir + 'Pac*_2008.nc') +lst = subprocess.getoutput('ls ' + data_dir + 'Pac*_2008.nc') #lst = commands.getoutput('ls ' + data_dir + 'Pacific_z*2_1987.nc') #lst = commands.getoutput('ls ' + data_dir + 'Pacific_[tuvz]*_2008.nc') lst = lst.split() diff --git a/examples/NWGOA3/make_ini_file.py b/examples/NWGOA3/make_ini_file.py index 46f323b..7fb8fc8 100644 --- a/examples/NWGOA3/make_ini_file.py +++ b/examples/NWGOA3/make_ini_file.py @@ -7,10 +7,10 @@ #src_varname = ['u', 'v'] src_varname = ['zeta', 'temp', 'salt', 'u', 'v', 'bac', 'c1', 'c2', 'c3', \ 'chl1', 'chl2', 'chl3', 'cldoc', 'csdoc', 'ddc', 'ddca', \ - 'ddn', 'ddsi', 'ldoc', 'ldon', 'nh4', 'no3', 'ox', 'po4', \ + 'ddn', 'ddsi', 'ldoc', 'ldon', 'nh4', 'no3', 'ox', 'po4', \ 's1', 's2', 's3', 'sdoc', 'sdon', 'sio4', 'talk', 'tco2', \ - 'zz1', 
'zz2', 'zzc1', 'zzc2'] -print 'Number of variables', len(src_varname) + 'zz1', 'zz2', 'zzc1', 'zzc2'] +print('Number of variables', len(src_varname)) irange = None jrange = None diff --git a/examples/Palau_HYCOM/get_hycom_GLBa0.08_salt_2015.py b/examples/Palau_HYCOM/get_hycom_GLBa0.08_salt_2015.py index 994fe36..1027593 100644 --- a/examples/Palau_HYCOM/get_hycom_GLBa0.08_salt_2015.py +++ b/examples/Palau_HYCOM/get_hycom_GLBa0.08_salt_2015.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -91,7 +91,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): # daysinyear = 365 daysinyear = 32 for day in range(1,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) #get data from server try: @@ -99,9 +99,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -113,10 +113,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/salt/archv.%04d_%03d_00_3zs.nc' %(year,day) #get data from server try: @@ -124,9 +124,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Palau_HYCOM/get_hycom_GLBa0.08_ssh_2015.py b/examples/Palau_HYCOM/get_hycom_GLBa0.08_ssh_2015.py index 254cb0b..0f1a834 100644 --- a/examples/Palau_HYCOM/get_hycom_GLBa0.08_ssh_2015.py +++ b/examples/Palau_HYCOM/get_hycom_GLBa0.08_ssh_2015.py @@ -48,7 +48,7 @@ def create_HYCOM_file(name, time, lon, lat, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -81,7 +81,7 @@ def create_HYCOM_file(name, time, lon, lat, var): # daysinyear = 365 daysinyear = 32 for day in range(1,daysinyear+1): - print 'Processing file for day %03d, year %04d' %(day, year) + print('Processing file for day %03d, year %04d' %(day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/2d/archv.%04d_%03d_00_2d.nc' %(year,day) #get data from server try: @@ -90,7 +90,7 @@ def create_HYCOM_file(name, time, lon, lat, var): spval = var.get_fill_value() dataset.close() except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -102,10 +102,10 @@ def create_HYCOM_file(name, time, lon, lat, var): if retry == 'True': if len(retry_day) != 0: - print "Some files have not been downloded... Let's try again" + print("Some files have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for day %03d, year %04d' %(day, year) + print('Retry file for day %03d, year %04d' %(day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/2d/archv.%04d_%03d_00_2d.nc' %(year,day) #get data from server try: @@ -114,7 +114,7 @@ def create_HYCOM_file(name, time, lon, lat, var): spval = var.get_fill_value() dataset.close() except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/Palau_HYCOM/get_hycom_GLBa0.08_temp_2015.py b/examples/Palau_HYCOM/get_hycom_GLBa0.08_temp_2015.py index deaadc8..ba72307 100644 --- a/examples/Palau_HYCOM/get_hycom_GLBa0.08_temp_2015.py +++ b/examples/Palau_HYCOM/get_hycom_GLBa0.08_temp_2015.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -91,7 +91,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): # daysinyear = 365 daysinyear = 32 for day in range(1,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) #get data from server try: @@ -99,9 +99,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') retry_day.append(day) continue @@ -113,10 +113,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/temp/archv.%04d_%03d_00_3zt.nc' %(year,day) #get data from server try: @@ -124,10 +124,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') continue #create netCDF file diff --git a/examples/Palau_HYCOM/get_hycom_GLBa0.08_u_2015.py b/examples/Palau_HYCOM/get_hycom_GLBa0.08_u_2015.py index 3315ec2..27a9352 100644 --- a/examples/Palau_HYCOM/get_hycom_GLBa0.08_u_2015.py +++ b/examples/Palau_HYCOM/get_hycom_GLBa0.08_u_2015.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -93,7 +93,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): # daysinyear = 365 daysinyear = 32 for day in range(1,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) #get data from server try: @@ -101,9 +101,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -115,10 +115,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/uvel/archv.%04d_%03d_00_3zu.nc' %(year,day) #get data from server try: @@ -126,9 +126,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Palau_HYCOM/get_hycom_GLBa0.08_v_2015.py b/examples/Palau_HYCOM/get_hycom_GLBa0.08_v_2015.py index c7acc44..818bee7 100644 --- a/examples/Palau_HYCOM/get_hycom_GLBa0.08_v_2015.py +++ b/examples/Palau_HYCOM/get_hycom_GLBa0.08_v_2015.py @@ -12,7 +12,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): - print 'Write with file %s' %name + print('Write with file %s' %name) #create netCDF file nc = netCDF4.Dataset(name, 'w', format='NETCDF3_64BIT') @@ -57,7 +57,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): nc.close() - print 'Done with file %s' %name + print('Done with file %s' %name) @@ -91,7 +91,7 @@ def create_HYCOM_file(name, time, lon, lat, z, var): # daysinyear = 365 daysinyear = 32 for day in range(1,daysinyear+1): - print 'Processing file for %s, day %03d, year %04d' %(invarname, day, year) + print('Processing file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) #get data from server try: @@ -99,9 +99,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... We skip this day.') retry_day.append(day) continue @@ -113,10 +113,10 @@ def create_HYCOM_file(name, time, lon, lat, z, var): if retry == 'True': if len(retry_day) != 0: - print "Some file have not been downloded... Let's try again" + print("Some file have not been downloded... Let's try again") while len(retry_day) != 0: for day in retry_day: - print 'Retry file for %s, day %03d, year %04d' %(invarname, day, year) + print('Retry file for %s, day %03d, year %04d' %(invarname, day, year)) url='http://tds.hycom.org/thredds/dodsC/datasets/GLBa0.08/expt_91.1/2015/vvel/archv.%04d_%03d_00_3zv.nc' %(year,day) #get data from server try: @@ -124,9 +124,9 @@ def create_HYCOM_file(name, time, lon, lat, z, var): var = dataset.variables[invarname][0,:,1500-9:1800,600:940] spval = var.get_fill_value() dataset.close() - print 'Got %s from server...' %invarname + print('Got %s from server...' %invarname) except: - print 'No file on the server... We skip this day.' + print('No file on the server... 
We skip this day.') continue #create netCDF file diff --git a/examples/Palau_HYCOM/make_bdry_file.py b/examples/Palau_HYCOM/make_bdry_file.py index c625057..cfefa37 100644 --- a/examples/Palau_HYCOM/make_bdry_file.py +++ b/examples/Palau_HYCOM/make_bdry_file.py @@ -66,9 +66,9 @@ def do_file(file, src_grd, dst_grd): lst = lst.split() lst_file = lst_file + lst -print 'Build OBC file from the following file list:' -print lst_file -print ' ' +print('Build OBC file from the following file list:') +print(lst_file) +print(' ') src_grd_file = data_dir + '../HYCOM_GLBa0.08_PALAU_grid.nc' src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM(src_grd_file) diff --git a/examples/Palau_HYCOM/make_clm_file.py b/examples/Palau_HYCOM/make_clm_file.py index b8e3aea..8009a30 100644 --- a/examples/Palau_HYCOM/make_clm_file.py +++ b/examples/Palau_HYCOM/make_clm_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np from datetime import datetime import matplotlib @@ -23,13 +23,13 @@ for year in lst_year: year = np.str(year) # lst = commands.getoutput('ls ' + data_dir + 'SODA_2.1.6_' + year + '_0*') - lst = commands.getoutput('ls ' + data_dir + '*' + year + '*') + lst = subprocess.getoutput('ls ' + data_dir + '*' + year + '*') lst = lst.split() lst_file = lst_file + lst -print 'Build CLM file from the following file list:' -print lst_file -print ' ' +print('Build CLM file from the following file list:') +print(lst_file) +print(' ') src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM('/archive/u1/uaf/kate/HYCOM/Svalbard/HYCOM_GLBa0.08_North_grid2.nc') dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC2') @@ -47,26 +47,26 @@ out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_ssh_clim_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-O', out_file, clim_file) - print command + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_temp_clim_' + dst_grd.name + '.nc' - command = ('ncks', '-a', '-A', out_file, clim_file) - print command + command = ('ncks', '-a', '-A', out_file, clim_file) + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_salt_clim_' + dst_grd.name + '.nc' - command = ('ncks', '-a', '-A', out_file, clim_file) - print command + command = ('ncks', '-a', '-A', out_file, clim_file) + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_u_clim_' + dst_grd.name + '.nc' - command = ('ncks', '-a', '-A', out_file, clim_file) - print command + command = ('ncks', '-a', '-A', out_file, clim_file) + print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_v_clim_' + dst_grd.name + '.nc' - command = ('ncks', '-a', '-A', out_file, clim_file) - print command + command = ('ncks', '-a', '-A', out_file, clim_file) + print(command) subprocess.check_call(command) os.remove(out_file) diff --git a/examples/Palau_HYCOM/make_ic_file.py b/examples/Palau_HYCOM/make_ic_file.py index 6da1876..6abc88b 100644 --- a/examples/Palau_HYCOM/make_ic_file.py +++ b/examples/Palau_HYCOM/make_ic_file.py @@ -1,6 +1,6 @@ import subprocess import os -import commands +import subprocess import numpy as np import matplotlib matplotlib.use('Agg') @@ -16,9 +16,9 @@ data_dir = '/archive/u1/uaf/kate/HYCOM/SCS/data/' dst_dir='./' -print 'Build IC file from the following file:' -print file -print ' ' +print('Build IC file from the following 
file:') +print(file) +print(' ') src_grd_file = data_dir + '../HYCOM_GLBa0.08_PALAU_grid.nc' src_grd = pyroms_toolbox.Grid_HYCOM.get_nc_Grid_HYCOM(src_grd_file) @@ -36,26 +36,26 @@ out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_ssh_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-O', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_temp_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_salt_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_u_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) out_file = dst_dir + file.rsplit('/')[-1][:-3] + '_v_ic_' + dst_grd.name + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) -print command +print(command) subprocess.check_call(command) os.remove(out_file) diff --git a/examples/Palau_HYCOM/remap.py b/examples/Palau_HYCOM/remap.py index 7e410f9..3296416 100644 --- a/examples/Palau_HYCOM/remap.py +++ b/examples/Palau_HYCOM/remap.py @@ -41,7 +41,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_ic_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -95,7 +95,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -107,7 +107,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -115,36 +115,36 @@ def remap(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' - print 'about to call remap ' + wts_file - print src_varz.shape + print('horizontal interpolation using scrip weights') + print('about to call remap ' + wts_file) + print(src_varz.shape) dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical 
interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Palau_HYCOM/remap_bdry.py b/examples/Palau_HYCOM/remap_bdry.py index 559860d..a28d554 100644 --- a/examples/Palau_HYCOM/remap_bdry.py +++ b/examples/Palau_HYCOM/remap_bdry.py @@ -18,7 +18,7 @@ class nctime(object): def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='./'): - print src_file + print(src_file) # get time nctime.long_name = 'time' @@ -27,7 +27,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # create boundary file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc' - print '\nCreating boundary file', dst_file + print('\nCreating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -39,7 +39,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] time = cdf.variables['ocean_time'][0] - print time + print(time) #get missing value spval = src_var._FillValue @@ -124,7 +124,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -136,25 +136,25 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units nc.variables[dst_varname_east].field = field_east - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -162,26 +162,26 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + 
print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, :], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -201,7 +201,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/Palau_HYCOM/remap_bdry_uv.py b/examples/Palau_HYCOM/remap_bdry_uv.py index 20379ab..315d73b 100644 --- a/examples/Palau_HYCOM/remap_bdry_uv.py +++ b/examples/Palau_HYCOM/remap_bdry_uv.py @@ -28,12 +28,12 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_bdry_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_bdry_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -64,95 +64,95 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. 
dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' - print 'Creating variable u_east' + print('Creating variable u_east') ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' ncu.variables['u_east'].units = 'meter second-1' ncu.variables['u_east'].field = 'u_east, scalar, series' - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' ncu.variables['u_west'].field = 'u_east, scalar, series' # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' ncu.variables['ubar_east'].units = 'meter second-1' ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' 
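A quick aside, not part of the patch: the print statement to print() conversions running through all of these hunks are also valid on Python 2.7 if a future import is added at the top of a file, although other changes in this patch (subprocess.getoutput, the new raise syntax's old counterpart) are 3.x-only. A minimal illustration:

    # Not part of the patch: the converted print() calls also work on
    # Python 2.7 once this future import is at the top of the file.
    from __future__ import print_function
    print('Creating variable', 'u_north')   # prints: Creating variable u_north
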
ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' - print 'Creating variable v_east' + print('Creating variable v_east') ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' ncv.variables['v_east'].units = 'meter second-1' ncv.variables['v_east'].field = 'v_east, scalar, series' - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' ncv.variables['vbar_east'].units = 'meter second-1' ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -161,27 +161,27 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -325,7 +325,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=2, dst_dir='. 
dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/Palau_HYCOM/remap_clm.py b/examples/Palau_HYCOM/remap_clm.py index af74e1d..c6e24c9 100644 --- a/examples/Palau_HYCOM/remap_clm.py +++ b/examples/Palau_HYCOM/remap_clm.py @@ -41,7 +41,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-3] + '_' + src_varname + '_clim_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -72,7 +72,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'free-surface' units = 'meter' field = 'free-surface, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' elif src_varname == 'temp': pos = 't' Cpos = 'rho' @@ -84,7 +84,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'potential temperature' units = 'Celsius' field = 'temperature, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' elif src_varname == 'salt': pos = 't' Cpos = 'rho' @@ -96,9 +96,9 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d long_name = 'salinity' units = 'PSU' field = 'salinity, scalar, series' - vartime = 'ocean_time' + vartime = 'ocean_time' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -110,7 +110,7 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -119,34 +119,34 @@ def remap_clm(src_file, src_varname, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_var, src_grd, pos=pos, spval=spval, \ dxy=dxy, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git 
a/examples/Palau_HYCOM/remap_clm_uv.py b/examples/Palau_HYCOM/remap_clm_uv.py index f8aa116..d3e06ff 100644 --- a/examples/Palau_HYCOM/remap_clm_uv.py +++ b/examples/Palau_HYCOM/remap_clm_uv.py @@ -43,12 +43,12 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_clim_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_clim_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -77,27 +77,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' ncu.variables['u'].time = 'ocean_time' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' ncu.variables['ubar'].time = 'ocean_time' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' ncv.variables['v'].time = 'ocean_time' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -106,27 +106,27 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to 
sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -181,7 +181,7 @@ def remap_clm_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./ dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/Palau_HYCOM/remap_uv.py b/examples/Palau_HYCOM/remap_uv.py index b2b786e..6185243 100644 --- a/examples/Palau_HYCOM/remap_uv.py +++ b/examples/Palau_HYCOM/remap_uv.py @@ -43,12 +43,12 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-3] + '_u_ic_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-3] + '_v_ic_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -77,24 +77,24 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' ncv.variables['v'].field = 'v-velocity, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -102,27 +102,27 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varu, src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.Grid_HYCOM.flood_fast(src_varv, 
src_grd, pos='t', \ spval=spval, dxy=dxy, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -177,7 +177,7 @@ def remap_uv(src_file, src_grd, dst_grd, dxy=20, cdepth=0, kk=0, dst_dir='./'): dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/README b/examples/README index e61181d..f6762a6 100644 --- a/examples/README +++ b/examples/README @@ -23,6 +23,8 @@ INTERPOLATING FROM SODA: conditions and climatology files - be sure to set the number of processes desired. + Added a couple of SODA3 examples, but the Arctic isn't working yet. + INTERPOLATING FROM LARGER ROMS DOMAINS: For creating initial and boundary files from larger ROMS grids, diff --git a/examples/Yellow_Sea/Inputs/Boundary/make_bdry_file.py b/examples/Yellow_Sea/Inputs/Boundary/make_bdry_file.py index ccdad52..6cf071f 100644 --- a/examples/Yellow_Sea/Inputs/Boundary/make_bdry_file.py +++ b/examples/Yellow_Sea/Inputs/Boundary/make_bdry_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms @@ -20,13 +20,13 @@ for year in lst_year: year = np.str(year) - lst = commands.getoutput('ls ' + data_dir + 'SODA_2.1.6_' + year + '*') + lst = subprocess.getoutput('ls ' + data_dir + 'SODA_2.1.6_' + year + '*') lst = lst.split() lst_file = lst_file + lst -print 'Build OBC file from the following file list:' -print lst_file -print ' ' +print('Build OBC file from the following file list:') +print(lst_file) +print(' ') src_grd_file = data_dir + 'SODA_grid.cdf' src_grd = pyroms_toolbox.BGrid_SODA.get_nc_BGrid_SODA('/Volumes/R1/DATA/SODA_2.1.6/SODA_grid.cdf', name='SODA_2.1.6_YELLOW', xrange=(225, 275), yrange=(190, 240)) diff --git a/examples/Yellow_Sea/Inputs/Boundary/remap_bdry.py b/examples/Yellow_Sea/Inputs/Boundary/remap_bdry.py index 4165d19..aac2f31 100644 --- a/examples/Yellow_Sea/Inputs/Boundary/remap_bdry.py +++ b/examples/Yellow_Sea/Inputs/Boundary/remap_bdry.py @@ -39,7 +39,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create boundary file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_bdry_' + dst_grd.name + '.nc' - print '\nCreating boundary file', dst_file + print('\nCreating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime, Lgrid=False) @@ -139,7 +139,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, field_west = 'salt_west, scalar, series' units = 'PSU' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -151,25 +151,25 @@ def remap_bdry(src_file, 
src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units nc.variables[dst_varname_east].field = field_east - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units @@ -177,26 +177,26 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) @@ -216,7 +216,7 @@ def remap_bdry(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) nc.variables[dst_varname_south][0] = np.squeeze(dst_var_south) diff --git a/examples/Yellow_Sea/Inputs/Boundary/remap_bdry_uv.py b/examples/Yellow_Sea/Inputs/Boundary/remap_bdry_uv.py index c5966d8..97eb9e2 100644 --- a/examples/Yellow_Sea/Inputs/Boundary/remap_bdry_uv.py +++ b/examples/Yellow_Sea/Inputs/Boundary/remap_bdry_uv.py @@ -42,12 +42,12 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. 
# create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_bdry_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_bdry_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -78,87 +78,87 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u_north' + print('Creating variable u_north') ncu.createVariable('u_north', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_north'].long_name = '3D u-momentum north boundary condition' ncu.variables['u_north'].units = 'meter second-1' ncu.variables['u_north'].field = 'u_north, scalar, series' - print 'Creating variable u_south' + print('Creating variable u_south') ncu.createVariable('u_south', 'f8', ('ocean_time', 's_rho', 'xi_u'), fill_value=spval) ncu.variables['u_south'].long_name = '3D u-momentum south boundary condition' ncu.variables['u_south'].units = 'meter second-1' ncu.variables['u_south'].field = 'u_south, scalar, series' - print 'Creating variable u_east' + print('Creating variable u_east') ncu.createVariable('u_east', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_east'].long_name = '3D u-momentum east boundary condition' ncu.variables['u_east'].units = 'meter second-1' ncu.variables['u_east'].field = 'u_east, scalar, series' - print 'Creating variable u_west' + print('Creating variable u_west') ncu.createVariable('u_west', 'f8', ('ocean_time', 's_rho', 'eta_u'), fill_value=spval) ncu.variables['u_west'].long_name = '3D u-momentum west boundary condition' ncu.variables['u_west'].units = 'meter second-1' ncu.variables['u_west'].field = 'u_east, scalar, series' # create variable in destination file - print 'Creating variable ubar_north' + print('Creating variable ubar_north') ncu.createVariable('ubar_north', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_north'].long_name = '2D u-momentum north boundary condition' ncu.variables['ubar_north'].units = 'meter second-1' ncu.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') ncu.createVariable('ubar_south', 'f8', ('ocean_time', 'xi_u'), fill_value=spval) ncu.variables['ubar_south'].long_name = '2D u-momentum south boundary condition' ncu.variables['ubar_south'].units = 'meter second-1' ncu.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') ncu.createVariable('ubar_east', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_east'].long_name = '2D u-momentum east boundary condition' ncu.variables['ubar_east'].units = 'meter second-1' ncu.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') ncu.createVariable('ubar_west', 'f8', ('ocean_time', 'eta_u'), fill_value=spval) ncu.variables['ubar_west'].long_name = '2D 
u-momentum west boundary condition' ncu.variables['ubar_west'].units = 'meter second-1' ncu.variables['ubar_west'].field = 'ubar_east, scalar, series' - print 'Creating variable v_north' + print('Creating variable v_north') ncv.createVariable('v_north', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_north'].long_name = '3D v-momentum north boundary condition' ncv.variables['v_north'].units = 'meter second-1' ncv.variables['v_north'].field = 'v_north, scalar, series' - print 'Creating variable v_south' + print('Creating variable v_south') ncv.createVariable('v_south', 'f8', ('ocean_time', 's_rho', 'xi_v'), fill_value=spval) ncv.variables['v_south'].long_name = '3D v-momentum south boundary condition' ncv.variables['v_south'].units = 'meter second-1' ncv.variables['v_south'].field = 'v_south, scalar, series' - print 'Creating variable v_east' + print('Creating variable v_east') ncv.createVariable('v_east', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_east'].long_name = '3D v-momentum east boundary condition' ncv.variables['v_east'].units = 'meter second-1' ncv.variables['v_east'].field = 'v_east, scalar, series' - print 'Creating variable v_west' + print('Creating variable v_west') ncv.createVariable('v_west', 'f8', ('ocean_time', 's_rho', 'eta_v'), fill_value=spval) ncv.variables['v_west'].long_name = '3D v-momentum west boundary condition' ncv.variables['v_west'].units = 'meter second-1' ncv.variables['v_west'].field = 'v_east, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') ncv.createVariable('vbar_north', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_north'].long_name = '2D v-momentum north boundary condition' ncv.variables['vbar_north'].units = 'meter second-1' ncv.variables['vbar_north'].field = 'vbar_north, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') ncv.createVariable('vbar_south', 'f8', ('ocean_time', 'xi_v'), fill_value=spval) ncv.variables['vbar_south'].long_name = '2D v-momentum south boundary condition' ncv.variables['vbar_south'].units = 'meter second-1' ncv.variables['vbar_south'].field = 'vbar_south, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') ncv.createVariable('vbar_east', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_east'].long_name = '2D v-momentum east boundary condition' ncv.variables['vbar_east'].units = 'meter second-1' ncv.variables['vbar_east'].field = 'vbar_east, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') ncv.createVariable('vbar_west', 'f8', ('ocean_time', 'eta_v'), fill_value=spval) ncv.variables['vbar_west'].long_name = '2D v-momentum west boundary condition' ncv.variables['vbar_west'].units = 'meter second-1' @@ -167,27 +167,27 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. 
# remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[::-1, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -319,7 +319,7 @@ def remap_bdry_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. dst_vbar_west = np.ma.masked_where(dst_grd.hgrid.mask_v[:,0] == 0, dst_vbar_west) # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u_north'][0] = dst_u_north ncu.variables['u_south'][0] = dst_u_south diff --git a/examples/Yellow_Sea/Inputs/Initial/make_ic_file.py b/examples/Yellow_Sea/Inputs/Initial/make_ic_file.py index 4831e95..55e6dbe 100644 --- a/examples/Yellow_Sea/Inputs/Initial/make_ic_file.py +++ b/examples/Yellow_Sea/Inputs/Initial/make_ic_file.py @@ -1,6 +1,6 @@ import subprocess import os -import commands +import subprocess import _iso import numpy as np @@ -14,9 +14,9 @@ file = '/center/w/kate/SODA/SODA_2.1.6_20071230-20080104.cdf' dst_dir='./' -print 'Build IC file from the following file:' -print file -print ' ' +print('Build IC file from the following file:') +print(file) +print(' ') src_grd = pyroms_toolbox.BGrid_SODA.get_nc_BGrid_SODA('/center/w/SODA/SODA_grid.cdf', name='SODA_2.1.6_YELLOW', xrange=(225, 275), yrange=(190, 240)) dst_grd = pyroms.grid.get_ROMS_grid('YELLOW') diff --git a/examples/Yellow_Sea/Inputs/Initial/remap.py b/examples/Yellow_Sea/Inputs/Initial/remap.py index a77504c..c20c5b6 100644 --- a/examples/Yellow_Sea/Inputs/Initial/remap.py +++ b/examples/Yellow_Sea/Inputs/Initial/remap.py @@ -39,7 +39,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create IC file dst_file = src_file.rsplit('/')[-1] dst_file = dst_dir + dst_file[:-4] + '_' + src_varname + '_ic_' + dst_grd.name + '.nc' - print '\nCreating file', dst_file + print('\nCreating file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -98,7 +98,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d units = 'PSU' field = 'salinity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') if ndim == 3: @@ -110,7 +110,7 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 
'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -118,34 +118,34 @@ def remap(src_file, src_varname, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_d # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/Yellow_Sea/Inputs/Initial/remap_uv.py b/examples/Yellow_Sea/Inputs/Initial/remap_uv.py index 04bd2f0..b79ad1f 100644 --- a/examples/Yellow_Sea/Inputs/Initial/remap_uv.py +++ b/examples/Yellow_Sea/Inputs/Initial/remap_uv.py @@ -42,12 +42,12 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create destination file dst_file = src_file.rsplit('/')[-1] dst_fileu = dst_dir + dst_file[:-4] + '_u_ic_' + dst_grd.name + '.nc' - print '\nCreating destination file', dst_fileu + print('\nCreating destination file', dst_fileu) if os.path.exists(dst_fileu) is True: os.remove(dst_fileu) pyroms_toolbox.nc_create_roms_file(dst_fileu, dst_grd, nctime) dst_filev = dst_dir + dst_file[:-4] + '_v_ic_' + dst_grd.name + '.nc' - print 'Creating destination file', dst_filev + print('Creating destination file', dst_filev) if os.path.exists(dst_filev) is True: os.remove(dst_filev) pyroms_toolbox.nc_create_roms_file(dst_filev, dst_grd, nctime) @@ -78,24 +78,24 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) # create variable in destination file - print 'Creating variable u' + print('Creating variable u') ncu.createVariable('u', 'f8', ('ocean_time', 's_rho', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['u'].long_name = '3D u-momentum component' ncu.variables['u'].units = 'meter second-1' ncu.variables['u'].field = 'u-velocity, scalar, series' # create variable in destination file - print 'Creating variable ubar' + print('Creating variable ubar') ncu.createVariable('ubar', 'f8', ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) ncu.variables['ubar'].long_name = '2D u-momentum component' ncu.variables['ubar'].units = 'meter second-1' ncu.variables['ubar'].field = 'ubar-velocity,, scalar, series' - print 'Creating variable v' + print('Creating variable v') ncv.createVariable('v', 'f8', ('ocean_time', 's_rho', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['v'].long_name = '3D v-momentum component' ncv.variables['v'].units = 'meter second-1' 
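All of the remap scripts in these examples share the same three-step pipeline: flood the source field so land points carry usable values, interpolate horizontally with the precomputed SCRIP weights, then interpolate vertically from z levels onto the ROMS sigma grid. A condensed sketch of that pipeline follows; the helper name remap_scalar is hypothetical, but the calls are the ones used by the surrounding Yellow_Sea (SODA B-grid) scripts, and spval, wts_file and the grid objects are assumed to be set up as in those scripts.

    import pyroms
    import pyroms_toolbox

    def remap_scalar(src_var, src_grd, dst_grd, dst_grdz, wts_file, spval,
                     Bpos='t', Cpos='rho', dmax=0, cdepth=0, kk=0):
        # 1. flood the source field so land points hold usable values
        src_varz = pyroms_toolbox.BGrid_SODA.flood(src_var, src_grd, Bpos=Bpos,
                                                   spval=spval, dmax=dmax,
                                                   cdepth=cdepth, kk=kk)
        # 2. horizontal interpolation with the precomputed SCRIP weights
        dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval)
        # 3. vertical interpolation from standard z levels to sigma
        return pyroms.remapping.z2roms(dst_varz[::-1, :, :], dst_grdz, dst_grd,
                                       Cpos=Cpos, spval=spval, flood=False)

The HYCOM versions of the scripts do the same thing with pyroms_toolbox.Grid_HYCOM.flood_fast in step 1.
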
ncv.variables['v'].field = 'v-velocity, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') ncv.createVariable('vbar', 'f8', ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) ncv.variables['vbar'].long_name = '2D v-momentum component' ncv.variables['vbar'].units = 'meter second-1' @@ -103,27 +103,27 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # remaping - print 'remapping and rotating u and v from', src_grd.name, \ - 'to', dst_grd.name - print 'time =', time + print('remapping and rotating u and v from', src_grd.name, \ + 'to', dst_grd.name) + print('time =', time) # flood the grid - print 'flood the grid' + print('flood the grid') src_uz = pyroms_toolbox.BGrid_SODA.flood(src_varu, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) src_vz = pyroms_toolbox.BGrid_SODA.flood(src_varv, src_grd, Bpos='uv', \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file, \ spval=spval) # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz[::-1,:,:], dst_grdz, \ dst_grd, Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz[::-1,:,:], dst_grdz, \ @@ -176,7 +176,7 @@ def remap_uv(src_file, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): dst_vbar[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') ncu.variables['ocean_time'][0] = time ncu.variables['u'][0] = dst_u ncu.variables['ubar'][0] = dst_ubar diff --git a/examples/Yellow_Sea/make_YELLOW_grd_v1.py b/examples/Yellow_Sea/make_YELLOW_grd_v1.py index f679fbf..d98510a 100644 --- a/examples/Yellow_Sea/make_YELLOW_grd_v1.py +++ b/examples/Yellow_Sea/make_YELLOW_grd_v1.py @@ -35,7 +35,7 @@ #bry = pyroms.hgrid.BoundaryInteractor(xp, yp, beta, shp=(Mm+3,Lm+3), proj=map) #hgrd=bry.grd -lonv, latv = map(hgrd.x_vert, hgrd.y_vert, inverse=True) +lonv, latv = list(map(hgrd.x_vert, hgrd.y_vert, inverse=True)) hgrd = pyroms.grid.CGrid_geo(lonv, latv, map) # generate the mask @@ -94,7 +94,7 @@ # check bathymetry roughness RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) # smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006) rx0_max = 0.35 @@ -102,7 +102,7 @@ # check bathymetry roughness again RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) # vertical coordinate theta_b = 2 diff --git a/examples/Yellow_Sea/make_YELLOW_grd_v2.py b/examples/Yellow_Sea/make_YELLOW_grd_v2.py index 9d9f355..4026d93 100644 --- a/examples/Yellow_Sea/make_YELLOW_grd_v2.py +++ b/examples/Yellow_Sea/make_YELLOW_grd_v2.py @@ -129,10 +129,10 @@ # smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006) RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) hsmooth = 
bathy_smoothing.smoothing_Positive_rx0(hgrd.mask_rho, h, 0.3) RoughMat = bathy_tools.RoughnessMatrix(hsmooth, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) # insure that depth is always deeper than hmin h = pyroms_toolbox.change(h, '<', hmin, hmin) diff --git a/examples/cobalt-preproc/Arctic/fix_pole.py b/examples/cobalt-preproc/Arctic/fix_pole.py new file mode 100644 index 0000000..7e8aedf --- /dev/null +++ b/examples/cobalt-preproc/Arctic/fix_pole.py @@ -0,0 +1,54 @@ +import subprocess +import os +import sys +import subprocess +import numpy as np +import netCDF4 as nc + +dst_dir='./' + +ic_file = dst_dir + 'ARCTIC4_ic_bio_GFDL-APR.nc' +fidic = nc.Dataset(ic_file,'a') +Cs_r = fidic.variables['Cs_r'] +nz = Cs_r.shape[0] + +# define all tracer stuff +list_tracer = ['alk', 'cadet_arag', 'cadet_calc', 'dic', 'fed', 'fedet', 'fedi', 'felg', 'fesm', 'ldon', 'ldop', 'lith', 'lithdet', 'nbact', 'ndet', 'ndi', 'nlg', 'nsm', 'nh4', 'no3', 'o2', 'pdet', 'po4', 'srdon', 'srdop', 'sldon', 'sldop', 'sidet', 'silg', 'sio4', 'nsmz', 'nmdz', 'nlgz','cased','chl','irr_mem','htotal','co3_ion'] + + +print('\nFixing a north pole problem') +for tr in list_tracer: + print('for variable', tr) + tracer = fidic.variables[tr][:] + mysum = np.zeros((nz)) + count = 0 + for j in range(753,768): + for i in range(271,287): + if tracer[0,0,j,i] != 0: + count += 1 + mysum += tracer[0,:,j,i] + print('count', count) + mysum = mysum/count + print('mysum', mysum) + for j in range(753,768): + for i in range(271,287): + if tracer[0,0,j,i] == 0: + tracer[0,:,j,i] = mysum + fidic.variables[tr][:] = tracer + +# These two tracers contain zeros, leading to nans. +tracer = fidic.variables['cased'][:] +mysum = 0.25*(tracer[0,:,752,279] + tracer[0,:,768,279] + tracer[0,:,760,270] + tracer[0,:,602,287]) +for j in range(753,768): + for i in range(271,287): + tracer[0,:,j,i] = mysum +fidic.variables['cased'][:] = tracer + +tracer = fidic.variables['irr_mem'][:] +mysum = 0.25*(tracer[0,:,752,279] + tracer[0,:,768,279] + tracer[0,:,760,270] + tracer[0,:,602,287]) +for j in range(753,768): + for i in range(271,287): + tracer[0,:,j,i] = mysum +fidic.variables['irr_mem'][:] = tracer + +fidic.close() diff --git a/examples/cobalt-preproc/Arctic/make_ic_file_bio.py b/examples/cobalt-preproc/Arctic/make_ic_file_bio.py new file mode 100644 index 0000000..f470721 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/make_ic_file_bio.py @@ -0,0 +1,75 @@ +import subprocess +import os +import sys +import subprocess +import numpy as np +import netCDF4 as nc + +import pyroms +import pyroms_toolbox + +from remap_bio import remap_bio + +#build list of date to remap +tag = 'y1988-2007m04' + +data_dir = '/archive/AKWATERS/kshedstrom/COBALT/' +dst_dir='./' + +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/archive/AKWATERS/kshedstrom/COBALT/GFDL_CM2.1_grid.nc', \ + name='ESM2M_ARCTIC4', area='tripole', ystart=150) +dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4') + +# define all tracer stuff +list_tracer = ['alk', 'cadet_arag', 'cadet_calc', 'dic', 'fed', 'fedet', 'fedi', 'felg', 'fesm', 'ldon', 'ldop', 'lith', 'lithdet', 'nbact', 'ndet', 'ndi', 'nlg', 'nsm', 'nh4', 'no3', 'o2', 'pdet', 'po4', 'srdon', 'srdop', 'sldon', 'sldop', 'sidet', 'silg', 'sio4', 'nsmz', 'nmdz', 'nlgz','cased','chl','irr_mem','htotal','co3_ion'] + +tracer_longname = ['Alkalinity', 'Detrital CaCO3', 'Detrital CaCO3', 'Dissolved Inorganic Carbon', 'Dissolved Iron', 'Detrital Iron', 'Diazotroph Iron', 'Large 
Phytoplankton Iron', 'Small Phytoplankton Iron', 'labile DON', 'labile DOP', 'Lithogenic Aluminosilicate', 'lithdet', 'bacterial', 'ndet', 'Diazotroph Nitrogen', 'Large Phytoplankton Nitrogen', 'Small Phytoplankton Nitrogen', 'Ammonia', 'Nitrate', 'Oxygen', 'Detrital Phosphorus', 'Phosphate', 'Semi-Refractory DON', 'Semi-Refractory DOP', 'Semilabile DON', 'Semilabile DOP', 'Detrital Silicon', 'Large Phytoplankton Silicon', 'Silicate', 'Small Zooplankton Nitrogen', 'Medium-sized zooplankton Nitrogen', 'large Zooplankton Nitrogen','Sediment CaCO3','Cholorophyll','Irradiance Memory','Total H+','Carbonate ion'] + +tracer_units = ['mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'g/kg', 'g/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg','mol.m-3','ug.kg-1','W.m-2','mol/kg','mol/kg'] + + + +print('\nBuild IC file for time %s' %tag) +for ktr in np.arange(len(list_tracer)): + mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr], \ + 'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01_12.nc', 'nframe':3} + remap_bio(mydict, src_grd, dst_grd, dst_dir=dst_dir) + +## merge file +ic_file = dst_dir + dst_grd.name + '_ic_bio_GFDL-APR.nc' +out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[0] + '.nc' +command = ('ncks', '-a', '-O', out_file, ic_file) +subprocess.check_call(command) +os.remove(out_file) + +for ktr in np.arange(1,len(list_tracer)): + out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[ktr] + '.nc' + command = ('ncks', '-a', '-A', out_file, ic_file) + subprocess.check_call(command) + os.remove(out_file) + + +#------------------ Add additional zeros fields ---------------------------------- + +fidic = nc.Dataset(ic_file,'a',format='NETCDF3_64BIT') + +fidic.createVariable('mu_mem_lg', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) ) +fidic.variables['mu_mem_lg'].long_name = 'large phytoplankton aggregation memory' +fidic.variables['mu_mem_lg'].units = '' +fidic.variables['mu_mem_lg'].field = 'mu_mem_lg, scalar, series' + +fidic.createVariable('mu_mem_di', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) ) +fidic.variables['mu_mem_di'].long_name = 'medium phytoplankton aggregation memory' +fidic.variables['mu_mem_di'].units = '' +fidic.variables['mu_mem_di'].field = 'mu_mem_di, scalar, series' + +fidic.createVariable('mu_mem_sm', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho',) ) +fidic.variables['mu_mem_sm'].long_name = 'small phytoplankton aggregation memory' +fidic.variables['mu_mem_sm'].units = '' +fidic.variables['mu_mem_sm'].field = 'mu_mem_sm, scalar, series' + +fidic.variables['mu_mem_lg'][0,:,:,:] = 0. +fidic.variables['mu_mem_di'][0,:,:,:] = 0. +fidic.variables['mu_mem_sm'][0,:,:,:] = 0. 
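The per-tracer ncks step used here (and again in make_ic_file_bio_addons.py below, and in the clm/ic scripts earlier) always runs the same triple: append the single-tracer file into the combined IC file, check the return code, delete the piece. A small helper would capture it; merge_into is a hypothetical name, and the sketch assumes the standard ncks behaviour of -O overwriting and -A appending variables into the target file.

    import os
    import subprocess

    def merge_into(ic_file, out_file, overwrite=False):
        # 'ncks -O' (re)creates ic_file from out_file, 'ncks -A' appends
        # out_file's variables into an existing ic_file; '-a' is passed
        # through unchanged, as in the scripts above.
        mode = '-O' if overwrite else '-A'
        subprocess.check_call(('ncks', '-a', mode, out_file, ic_file))
        os.remove(out_file)
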
+ +fidic.close() diff --git a/examples/cobalt-preproc/Arctic/make_ic_file_bio_addons.py b/examples/cobalt-preproc/Arctic/make_ic_file_bio_addons.py new file mode 100644 index 0000000..9c083c3 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/make_ic_file_bio_addons.py @@ -0,0 +1,83 @@ +import subprocess +import os +import sys +import subprocess +import numpy as np + +import pyroms +import pyroms_toolbox + +from remap_bio_woa import remap_bio_woa +from remap_bio_glodap import remap_bio_glodap + +data_dir_woa = '/archive/AKWATERS/kshedstrom/COBALT/' +data_dir_glodap = '/archive/AKWATERS/kshedstrom/COBALT/' +dst_dir='./' + +src_grd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/archive/AKWATERS/kshedstrom/COBALT/GFDL_CM2.1_grid.nc', \ + name='ESM2M_ARCTIC4', area='tripole', ystart=150) +dst_grd = pyroms.grid.get_ROMS_grid('ARCTIC4') + +# tracer informations +list_tracer = ['alk', 'cadet_arag', 'cadet_calc', 'dic', 'fed', 'fedet', 'fedi', 'felg', 'fesm', 'ldon', 'ldop', 'lith', 'lithdet', 'nbact', 'ndet', 'ndi', 'nlg', 'nsm', 'nh4', 'no3', 'o2', 'pdet', 'po4', 'srdon', 'srdop', 'sldon', 'sldop', 'sidet', 'silg', 'sio4', 'nsmz', 'nmdz', 'nlgz','cased','chl','irr_mem','htotal','co3_ion'] + +tracer_longname = ['Alkalinity', 'Detrital CaCO3', 'Detrital CaCO3', 'Dissolved Inorganic Carbon', 'Dissolved Iron', 'Detrital Iron', 'Diazotroph Iron', 'Large Phytoplankton Iron', 'Small Phytoplankton Iron', 'labile DON', 'labile DOP', 'Lithogenic Aluminosilicate', 'lithdet', 'bacterial', 'ndet', 'Diazotroph Nitrogen', 'Large Phytoplankton Nitrogen', 'Small Phytoplankton Nitrogen', 'Ammonia', 'Nitrate', 'Oxygen', 'Detrital Phosphorus', 'Phosphate', 'Semi-Refractory DON', 'Semi-Refractory DOP', 'Semilabile DON', 'Semilabile DOP', 'Detrital Silicon', 'Large Phytoplankton Silicon', 'Silicate', 'Small Zooplankton Nitrogen', 'Medium-sized zooplankton Nitrogen', 'large Zooplankton Nitrogen','Sediment CaCO3','Cholorophyll','Irradiance Memory','Total H+','Carbonate ion'] + +tracer_units = ['mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'g/kg', 'g/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg','mol.m-3','ug.kg-1','W.m-2','mol/kg','mol/kg'] + + +ic_file = dst_dir + dst_grd.name + '_ic_bio_GFDL-APR.nc' + +#------- WOA13 --------------------------------- +id_tracer_update_woa = [19,20,22,29] +list_tracer_update_woa = [] +tracer_longname_update_woa = [] +tracer_units_update_woa = [] + +for idtra in id_tracer_update_woa: + print(list_tracer[idtra]) + +for idtra in id_tracer_update_woa: + # add to tracer update + list_tracer_update_woa.append(list_tracer[idtra]) + tracer_longname_update_woa.append(tracer_longname[idtra]) + tracer_units_update_woa.append(tracer_units[idtra]) + +for ktr in np.arange(len(list_tracer_update_woa)): + ctra = list_tracer_update_woa[ktr] + if ctra == 'sio4': + ctra = 'si' + mydict = {'tracer':list_tracer_update_woa[ktr],'longname':tracer_longname_update_woa[ktr],'units':tracer_units_update_woa[ktr],'file':data_dir_woa + ctra + '_WOA13-CM2.1_monthly.nc', \ + 'nframe':3} + remap_bio_woa(mydict, src_grd, dst_grd, dst_dir=dst_dir) + out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer_update_woa[ktr] + '.nc' + command = ('ncks', '-a', '-A', out_file, ic_file) + subprocess.check_call(command) + os.remove(out_file) + +#--------- GLODAP ------------------------------- 
+id_tracer_update_glodap = [0,3] +list_tracer_update_glodap = [] +tracer_longname_update_glodap = [] +tracer_units_update_glodap = [] + +for idtra in id_tracer_update_glodap: + print(list_tracer[idtra]) + +for idtra in id_tracer_update_glodap: + # add to tracer update + list_tracer_update_glodap.append(list_tracer[idtra]) + tracer_longname_update_glodap.append(tracer_longname[idtra]) + tracer_units_update_glodap.append(tracer_units[idtra]) + +for ktr in np.arange(len(list_tracer_update_glodap)): + ctra = list_tracer_update_glodap[ktr] + mydict = {'tracer':list_tracer_update_glodap[ktr],'longname':tracer_longname_update_glodap[ktr],'units':tracer_units_update_glodap[ktr],'file':data_dir_glodap + ctra + '_GLODAP-ESM2M_annual.nc', \ + 'nframe':0} + remap_bio_glodap(mydict, src_grd, dst_grd, dst_dir=dst_dir) + out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer_update_glodap[ktr] + '.nc' + command = ('ncks', '-a', '-A', out_file, ic_file) + subprocess.check_call(command) + os.remove(out_file) + + diff --git a/examples/cobalt-preproc/Arctic/make_remap_weights_file.py b/examples/cobalt-preproc/Arctic/make_remap_weights_file.py new file mode 100644 index 0000000..e236766 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/make_remap_weights_file.py @@ -0,0 +1,83 @@ +import pyroms +import pyroms_toolbox + +# load the grid +srcgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('/import/archive/AKWATERS/kshedstrom/COBALT/GFDL_CM2.1_grid.nc', \ + name='ESM2M_ARCTIC4', area='tripole', ystart=150) +dstgrd = pyroms.grid.get_ROMS_grid('ARCTIC4') +dstgrd.hgrid.lon_rho = dstgrd.hgrid.lon_rho - 360. +dstgrd.hgrid.lon_u = dstgrd.hgrid.lon_u - 360. +dstgrd.hgrid.lon_v = dstgrd.hgrid.lon_v - 360. +dstgrd.hgrid.lon_psi = dstgrd.hgrid.lon_psi - 360. +dstgrd.hgrid.lon_vert = dstgrd.hgrid.lon_vert - 360. 
+ +# make remap grid file for scrip +pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(srcgrd, Bpos='t') +pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(srcgrd, Bpos='uv') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='u') +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='v') + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_ESM2M_ARCTIC4_t.nc' +grid2_file = 'remap_grid_ARCTIC4_rho.nc' +interp_file1 = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_rho.nc' +interp_file2 = 'remap_weights_ARCTIC4_to_ESM2M_bilinear_rho_to_t.nc' +map1_name = 'ESM2M to ARCTIC4 Bilinear Mapping' +map2_name = 'ARCTIC4 to ESM2M Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method) + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_ESM2M_ARCTIC4_uv.nc' +grid2_file = 'remap_grid_ARCTIC4_rho.nc' +interp_file1 = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_uv_to_rho.nc' +interp_file2 = 'remap_weights_ARCTIC4_to_ESM2M_bilinear_rho_to_uv.nc' +map1_name = 'ESM2M to ARCTIC4 Bilinear Mapping' +map2_name = 'ARCTIC4 to ESM2M Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method) + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_ESM2M_ARCTIC4_t.nc' +grid2_file = 'remap_grid_ARCTIC4_u.nc' +interp_file1 = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_u.nc' +interp_file2 = 'remap_weights_ARCTIC4_to_ESM2M_bilinear_u_to_t.nc' +map1_name = 'ESM2M to ARCTIC4 Bilinear Mapping' +map2_name = 'ARCTIC4 to ESM2M Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method) + + +# compute remap weights +# input namelist variables for bilinear remapping at rho points +grid1_file = 'remap_grid_ESM2M_ARCTIC4_t.nc' +grid2_file = 'remap_grid_ARCTIC4_v.nc' +interp_file1 = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_v.nc' +interp_file2 = 'remap_weights_ARCTIC4_to_ESM2M_bilinear_v_to_t.nc' +map1_name = 'ESM2M to ARCTIC4 Bilinear Mapping' +map2_name = 'ARCTIC4 to ESM2M Bilinear Mapping' +num_maps = 1 +map_method = 'bilinear' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method) + diff --git a/examples/cobalt-preproc/Arctic/remap_bio.py b/examples/cobalt-preproc/Arctic/remap_bio.py new file mode 100644 index 0000000..cafdcd6 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/remap_bio.py @@ -0,0 +1,143 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +from datetime import datetime +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample +# xrange=src_grd.xrange; yrange=src_grd.yrange + ystart = 150 + + src_varname = argdict['tracer'] + tracer = src_varname + src_file = 
argdict['file'] + units = argdict['units'] + longname = argdict['longname'] + nframe = argdict['nframe'] + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # create clim file + dst_file = tracer + '.nc' + dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file + print('Creating clim file', dst_file) + if os.path.exists(dst_file) is True: + os.remove(dst_file) + pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) + + # open clim file + nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') + + #load var + cdf = netCDF.Dataset(src_file) + src_var = cdf.variables[src_varname] + + time = cdf.variables['time'][nframe] + + # to be in sync with physics, add +0.5 day + #time = time + 0.5 + # time will be given by physics anyway + + #get missing value + spval = src_var._FillValue + + # determine variable dimension + ndim = len(src_var.dimensions) - 1 + print('ndim', ndim, src_var.dimensions) + + # ARCTIC4 grid sub-sample + if ndim == 3: +# src_var = src_var[nframe,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,:,:,:] + print('subgrid 3d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[:,np.r_[ystart:np.size(src_var,1),-1],:] + print('subgrid 3d', src_var.shape) + elif ndim == 2: +# src_var = src_var[nframe,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,:,:] + print('subgrid 2d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] + print('subgrid 2d', src_var.shape) + + + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_rho.nc' + dst_varname = tracer + dimensions = ('ocean_time', 's_rho', 'eta_rho', 'xi_rho') + long_name = longname + field = tracer + ', scalar, series' + units = units + + if ndim == 3: + # build intermediate zgrid + zlevel = -z[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + + # create variable in file + print('Creating variable', dst_varname) + nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) + nc.variables[dst_varname].long_name = long_name + nc.variables[dst_varname].units = units + nc.variables[dst_varname].field = field + #nc.variables[dst_varname_north]._FillValue = spval + + + # remapping + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + + if ndim == 3: + # flood the grid + print('flood the grid') + src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ + dmax=dmax, cdepth=cdepth, kk=kk) + else: + src_varz = src_var + + # horizontal interpolation using scrip weights + print('horizontal interpolation using scrip weights') + dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) + + if ndim == 3: + # vertical interpolation from standard z level to sigma + print('vertical interpolation from standard z level to sigma') + dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ + dst_grd, Cpos=Cpos, spval=spval, flood=False) + else: + dst_var = dst_varz + + # write data in destination file + print('write data in destination file\n') + nc.variables['ocean_time'][0] = time + nc.variables[dst_varname][0] = dst_var + + # close file + nc.close() + cdf.close() + + if src_varname == 'eta': + return dst_varz diff --git 
a/examples/cobalt-preproc/Arctic/remap_bio_glodap.py b/examples/cobalt-preproc/Arctic/remap_bio_glodap.py new file mode 100644 index 0000000..bead4b9 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/remap_bio_glodap.py @@ -0,0 +1,162 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +from datetime import datetime +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample + ystart = 150 +# xrange=src_grd.xrange; yrange=src_grd.yrange + + src_varname = argdict['tracer'] + tracer = src_varname + src_file = argdict['file'] + units = argdict['units'] + longname = argdict['longname'] + nframe = argdict['nframe'] + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # create clim file + dst_file = tracer + '.nc' + dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file + print('Creating clim file', dst_file) + if os.path.exists(dst_file) is True: + os.remove(dst_file) + pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) + + # open clim file + nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') + + #load var + cdf = netCDF.Dataset(src_file) + src_var = cdf.variables[src_varname] + + tmp = cdf.variables['time'][nframe] + #if len(tmp) > 1: + # print 'error : multiple frames in input file' ; exit() + #else: + # time = tmp[0] + + # to be in sync with physics, add +0.5 day + #time = time + 0.5 + # time will be given by physics anyway + time = 0. + + #get missing value + spval = src_var._FillValue + + spval2 = -1.0e+10 + + # determine variable dimension + ndim = len(src_var.dimensions) - 1 + + # ARCTIC4 grid sub-sample + if ndim == 3: +# src_var = src_var[nframe,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe, :, :, :] + print('subgrid 3d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[:,np.r_[ystart:np.size(src_var,1),-1],:] + print('subgrid 3d', src_var.shape) + elif ndim == 2: +# src_var = src_var[nframe,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe, :, :] + print('subgrid 2d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] + print('subgrid 2d', src_var.shape) + + if tracer == 'alk': + unit_conversion = 1. / 1e6 + elif tracer == 'dic': + unit_conversion = 1. 
/ 1e6 + + src_var = src_var * unit_conversion + + + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_rho.nc' + dst_varname = tracer + dimensions = ('ocean_time', 's_rho', 'eta_rho', 'xi_rho') + long_name = longname + field = tracer + ', scalar, series' + units = units + + if ndim == 3: + # build intermediate zgrid + zlevel = -z[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + + # create variable in file + print('Creating variable', dst_varname) + nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) + nc.variables[dst_varname].long_name = long_name + nc.variables[dst_varname].units = units + nc.variables[dst_varname].field = field + #nc.variables[dst_varname_north]._FillValue = spval + + + # remapping + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + + if ndim == 3: + # flood the grid + print('flood the grid') + src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ + dmax=dmax, cdepth=cdepth, kk=kk) + else: + src_varz = src_var + + # horizontal interpolation using scrip weights + print('horizontal interpolation using scrip weights') + dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) + + if ndim == 3: + # vertical interpolation from standard z level to sigma + print('vertical interpolation from standard z level to sigma') + dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ + dst_grd, Cpos=Cpos, spval=spval, flood=False) + else: + dst_var = dst_varz + + if ndim == 3: + for kz in np.arange(dst_grd.vgrid.N): + tmp = dst_var[kz,:,:].copy() + tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 + dst_var[kz,:,:] = tmp.copy() + + # write data in destination file + print('write data in destination file\n') + nc.variables['ocean_time'][0] = time + nc.variables[dst_varname][0] = dst_var + + # close file + nc.close() + cdf.close() + + if src_varname == 'eta': + return dst_varz diff --git a/examples/cobalt-preproc/Arctic/remap_bio_woa.py b/examples/cobalt-preproc/Arctic/remap_bio_woa.py new file mode 100644 index 0000000..e273581 --- /dev/null +++ b/examples/cobalt-preproc/Arctic/remap_bio_woa.py @@ -0,0 +1,166 @@ +import numpy as np +import os +try: + import netCDF4 as netCDF +except: + import netCDF3 as netCDF +import matplotlib.pyplot as plt +import time +from datetime import datetime +from matplotlib.dates import date2num, num2date + +import pyroms +import pyroms_toolbox +import _remapping + +class nctime(object): + pass + +def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): + + # ARCTIC4 grid sub-sample +# xrange=src_grd.xrange; yrange=src_grd.yrange + ystart = 150 + + src_varname = argdict['tracer'] + tracer = src_varname + src_file = argdict['file'] + units = argdict['units'] + longname = argdict['longname'] + nframe = argdict['nframe'] + + if src_varname == 'sio4': + src_varname = 'si' + + # get time + nctime.long_name = 'time' + nctime.units = 'days since 1900-01-01 00:00:00' + + # create clim file + dst_file = tracer + '.nc' + dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file + print('Creating clim file', dst_file) + if os.path.exists(dst_file) is True: + os.remove(dst_file) + pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) + + # open clim file + nc = netCDF.Dataset(dst_file, 'a', 
format='NETCDF3_64BIT') + + #load var + cdf = netCDF.Dataset(src_file) + src_var = cdf.variables[src_varname] + + + tmp = cdf.variables['time'][nframe] + + # to be in sync with physics, add +0.5 day + #time = time + 0.5 + # time will be given by physics anyway + time = 0. + + #get missing value + spval = src_var._FillValue + + spval2 = -1.0e+10 + + # determine variable dimension + ndim = len(src_var.dimensions) - 1 + + # ARCTIC4 grid sub-sample + if ndim == 3: +# src_var = src_var[nframe,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe, :, :, :] + print('subgrid 3d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[:,np.r_[ystart:np.size(src_var,1),-1],:] + print('subgrid 3d', src_var.shape) + elif ndim == 2: +# src_var = src_var[nframe,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe, :, :] + print('subgrid 2d', src_var.shape) +# src_var = np.squeeze(src_var, axis=(0,)) + src_var = src_var[np.r_[ystart:np.size(src_var,0),-1],:] + print('subgrid 2d', src_var.shape) + + + if tracer == 'no3': + unit_conversion = 1. / 1e6 / 1.035 + elif tracer == 'po4': + unit_conversion = 1. / 1e6 / 1.035 + elif tracer == 'o2': + unit_conversion = 1. / 1035 / 22391.6 * 1000.0 + elif tracer == 'sio4': + unit_conversion = 1. / 1e6 / 1.035 + + src_var = src_var * unit_conversion + + Bpos = 't' + Cpos = 'rho' + z = src_grd.z_t + Mp, Lp = dst_grd.hgrid.mask_rho.shape + wts_file = 'remap_weights_ESM2M_to_ARCTIC4_bilinear_t_to_rho.nc' + dst_varname = tracer + dimensions = ('ocean_time', 's_rho', 'eta_rho', 'xi_rho') + long_name = longname + field = tracer + ', scalar, series' + units = units + + if ndim == 3: + # build intermediate zgrid + zlevel = -z[::-1] + nzlevel = len(zlevel) + dst_zcoord = pyroms.vgrid.z_coordinate(dst_grd.vgrid.h, zlevel, nzlevel) + dst_grdz = pyroms.grid.ROMS_Grid(dst_grd.name+'_Z', dst_grd.hgrid, dst_zcoord) + + + # create variable in file + print('Creating variable', dst_varname) + nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) + nc.variables[dst_varname].long_name = long_name + nc.variables[dst_varname].units = units + nc.variables[dst_varname].field = field + #nc.variables[dst_varname_north]._FillValue = spval + + + # remapping + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) + + if ndim == 3: + # flood the grid + print('flood the grid') + src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ + dmax=dmax, cdepth=cdepth, kk=kk) + else: + src_varz = src_var + + # horizontal interpolation using scrip weights + print('horizontal interpolation using scrip weights') + dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) + + if ndim == 3: + # vertical interpolation from standard z level to sigma + print('vertical interpolation from standard z level to sigma') + dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ + dst_grd, Cpos=Cpos, spval=spval, flood=False) + else: + dst_var = dst_varz + + if ndim == 3: + for kz in np.arange(dst_grd.vgrid.N): + tmp = dst_var[kz,:,:].copy() + tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 + dst_var[kz,:,:] = tmp.copy() + + # write data in destination file + print('write data in destination file\n') + nc.variables['ocean_time'][0] = time + nc.variables[dst_varname][0] = dst_var + + # close file + nc.close() + cdf.close() + + if src_varname == 'eta': + return dst_varz diff --git a/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_GFDLclimato.py 
b/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_GFDLclimato.py index 248698a..d77fd9d 100644 --- a/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_GFDLclimato.py +++ b/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_GFDLclimato.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms @@ -23,7 +23,7 @@ tracer_units = ['mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'g/kg', 'g/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg'] for mm in np.arange(12): - print '\nBuild OBC file for month', mm + print('\nBuild OBC file for month', mm) for ktr in np.arange(len(list_tracer)): mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr],'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01_12.nc','frame':mm} remap_bdry_bio(mydict, src_grd, dst_grd, dst_dir=dst_dir) diff --git a/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_addons.py b/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_addons.py index fd8ed17..45a448f 100644 --- a/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_addons.py +++ b/examples/cobalt-preproc/Boundary_bio/make_bdry_file_bio_addons.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms @@ -34,7 +34,7 @@ tracer_units_update_woa = [] for idtra in id_tracer_update_woa: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_woa: # add to tracer update @@ -44,7 +44,7 @@ for mm in np.arange(12): bdry_file = dst_dir + dst_grd.name + '_bdry_bio_GFDL+WOA+GLODAP_m' + str(mm+1).zfill(2) + '.nc' - print '\nBuild OBC file for month', mm + print('\nBuild OBC file for month', mm) for ktr in np.arange(len(list_tracer_update_woa)): ctra = list_tracer_update_woa[ktr] if ctra == 'sio4': @@ -65,7 +65,7 @@ tracer_units_update_glodap = [] for idtra in id_tracer_update_glodap: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_glodap: # add to tracer update @@ -75,7 +75,7 @@ for mm in np.arange(12): bdry_file = dst_dir + dst_grd.name + '_bdry_bio_GFDL+WOA+GLODAP_m' + str(mm+1).zfill(2) + '.nc' - print '\nBuild OBC file for month', mm + print('\nBuild OBC file for month', mm) for ktr in np.arange(len(list_tracer_update_glodap)): ctra = list_tracer_update_glodap[ktr] if ctra == 'sio4': diff --git a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio.py b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio.py index e72f6cb..1b88ff8 100644 --- a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio.py +++ b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange src_varname = argdict['tracer'] @@ -35,7 +35,7 @@ def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. 
# create boundary file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_bdry_bio_' + dst_file - print 'Creating boundary file', dst_file + print('Creating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -44,7 +44,7 @@ def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] # correct time to some classic value @@ -98,25 +98,25 @@ def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units nc.variables[dst_varname_west].field = field_west - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units @@ -124,31 +124,31 @@ def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. 
# remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) dst_var_south = pyroms.remapping.z2roms(dst_varz[::-1, 0:1, :], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(0,1)) - dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ + dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(Lp-1,Lp), jrange=(0,Mp)) dst_var_west = pyroms.remapping.z2roms(dst_varz[::-1, :, 0:1], \ @@ -157,11 +157,11 @@ def remap_bdry_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='. else: dst_var_north = dst_varz[-1, :] dst_var_south = dst_varz[0, :] - dst_var_east = dst_varz[:, -1] + dst_var_east = dst_varz[:, -1] dst_var_west = dst_varz[:, 0] # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables['ocean_time'].cycle_length = 365.25 nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) diff --git a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_glodap.py b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_glodap.py index 86e0144..0471946 100644 --- a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_glodap.py +++ b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_glodap.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange src_varname = argdict['tracer'] @@ -35,7 +35,7 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst # create boundary file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_bdry_bio_' + dst_file - print 'Creating boundary file', dst_file + print('Creating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -44,7 +44,7 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] # correct time to some classic value @@ -65,9 +65,9 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst src_var = src_var[0,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] if tracer == 'alk': - unit_conversion = 1. / 1e6 + unit_conversion = 1. / 1e6 elif tracer == 'dic': - unit_conversion = 1. 
/ 1e6 + unit_conversion = 1. / 1e6 src_var = src_var * unit_conversion @@ -105,25 +105,25 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval2) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval2) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval2) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units nc.variables[dst_varname_west].field = field_west - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval2) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units @@ -131,31 +131,31 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) dst_var_south = pyroms.remapping.z2roms(dst_varz[::-1, 0:1, :], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(0,1)) - dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ + dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(Lp-1,Lp), jrange=(0,Mp)) dst_var_west = pyroms.remapping.z2roms(dst_varz[::-1, :, 0:1], \ @@ -164,7 +164,7 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst else: dst_var_north = dst_varz[-1, :] dst_var_south = dst_varz[0, :] - dst_var_east = dst_varz[:, -1] + dst_var_east = dst_varz[:, -1] dst_var_west = dst_varz[:, 0] dst_var_north[np.where(dst_var_north == spval)] = spval2 @@ -173,7 +173,7 @@ def remap_bdry_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst dst_var_west[np.where(dst_var_west == spval)] = spval2 # write data in destination file - print 'write data in 
destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables['ocean_time'].cycle_length = 365.25 nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) diff --git a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_woa.py b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_woa.py index 5ed4a38..1e1e98c 100644 --- a/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_woa.py +++ b/examples/cobalt-preproc/Boundary_bio/remap_bdry_bio_woa.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange src_varname = argdict['tracer'] @@ -38,7 +38,7 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di # create boundary file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_bdry_bio_' + dst_file - print 'Creating boundary file', dst_file + print('Creating boundary file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_bdry_file(dst_file, dst_grd, nctime) @@ -47,7 +47,7 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] # correct time to some classic value @@ -112,25 +112,25 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di # create variable in boudary file - print 'Creating variable', dst_varname_north + print('Creating variable', dst_varname_north) nc.createVariable(dst_varname_north, 'f8', dimensions_north, fill_value=spval2) nc.variables[dst_varname_north].long_name = long_name_north nc.variables[dst_varname_north].units = units nc.variables[dst_varname_north].field = field_north - print 'Creating variable', dst_varname_south + print('Creating variable', dst_varname_south) nc.createVariable(dst_varname_south, 'f8', dimensions_south, fill_value=spval2) nc.variables[dst_varname_south].long_name = long_name_south nc.variables[dst_varname_south].units = units nc.variables[dst_varname_south].field = field_south - print 'Creating variable', dst_varname_west + print('Creating variable', dst_varname_west) nc.createVariable(dst_varname_west, 'f8', dimensions_west, fill_value=spval2) nc.variables[dst_varname_west].long_name = long_name_west nc.variables[dst_varname_west].units = units nc.variables[dst_varname_west].field = field_west - print 'Creating variable', dst_varname_east + print('Creating variable', dst_varname_east) nc.createVariable(dst_varname_east, 'f8', dimensions_east, fill_value=spval2) nc.variables[dst_varname_east].long_name = long_name_east nc.variables[dst_varname_east].units = units @@ -138,31 +138,31 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = 
pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var_north = pyroms.remapping.z2roms(dst_varz[::-1, Mp-1:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-1,Mp)) dst_var_south = pyroms.remapping.z2roms(dst_varz[::-1, 0:1, :], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(0,Lp), jrange=(0,1)) - dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ + dst_var_east = pyroms.remapping.z2roms(dst_varz[::-1, :, Lp-1:Lp], \ dst_grdz, dst_grd, Cpos=Cpos, spval=spval, \ flood=False, irange=(Lp-1,Lp), jrange=(0,Mp)) dst_var_west = pyroms.remapping.z2roms(dst_varz[::-1, :, 0:1], \ @@ -171,7 +171,7 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di else: dst_var_north = dst_varz[-1, :] dst_var_south = dst_varz[0, :] - dst_var_east = dst_varz[:, -1] + dst_var_east = dst_varz[:, -1] dst_var_west = dst_varz[:, 0] dst_var_north[np.where(dst_var_north == spval)] = spval2 @@ -180,7 +180,7 @@ def remap_bdry_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_di dst_var_west[np.where(dst_var_west == spval)] = spval2 # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables['ocean_time'].cycle_length = 365.25 nc.variables[dst_varname_north][0] = np.squeeze(dst_var_north) diff --git a/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_addons.py b/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_addons.py index 7ca04aa..c28b579 100644 --- a/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_addons.py +++ b/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_addons.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms @@ -32,7 +32,7 @@ tracer_units_update_woa = [] for idtra in id_tracer_update_woa: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_woa: # add to tracer update @@ -42,7 +42,7 @@ for mm in np.arange(12): clim_file = dst_dir + dst_grd.name + '_clim_bio_GFDL+WOA+GLODAP_m' + str(mm+1).zfill(2) + '.nc' - print '\nBuild CLIM file for month', mm + print('\nBuild CLIM file for month', mm) for ktr in np.arange(len(list_tracer_update_woa)): ctra = list_tracer_update_woa[ktr] if ctra == 'sio4': @@ -62,7 +62,7 @@ tracer_units_update_glodap = [] for idtra in id_tracer_update_glodap: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_glodap: # add to tracer update @@ -72,7 +72,7 @@ for mm in np.arange(12): clim_file = dst_dir + dst_grd.name + '_clim_bio_GFDL+WOA+GLODAP_m' + str(mm+1).zfill(2) + '.nc' - print '\nBuild CLIM file for month', mm + print('\nBuild CLIM file for month', mm) for ktr in np.arange(len(list_tracer_update_glodap)): ctra = list_tracer_update_glodap[ktr] mydict = {'tracer':list_tracer_update_glodap[ktr],'longname':tracer_longname_update_glodap[ktr],'units':tracer_units_update_glodap[ktr],'file':data_dir_glodap + ctra + '_GLODAP-ESM2M_annual.nc', \ diff --git a/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_gfdl-climato.py b/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_gfdl-climato.py index 2983a22..a248a9c 100644 --- a/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_gfdl-climato.py +++ 
b/examples/cobalt-preproc/Clim_bio/make_clim_file_bio_gfdl-climato.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import pyroms @@ -23,7 +23,7 @@ tracer_units = ['mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'g/kg', 'g/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg', 'mol/kg'] for mm in np.arange(12): - print '\nBuild clim file for month ', mm + print('\nBuild clim file for month ', mm) for ktr in np.arange(len(list_tracer)): mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr],'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01_12.nc','frame':mm} remap_bio(mydict, src_grd, dst_grd, dst_dir=dst_dir) diff --git a/examples/cobalt-preproc/Clim_bio/remap_bio.py b/examples/cobalt-preproc/Clim_bio/remap_bio.py index 55c7016..6d7d4b3 100644 --- a/examples/cobalt-preproc/Clim_bio/remap_bio.py +++ b/examples/cobalt-preproc/Clim_bio/remap_bio.py @@ -35,7 +35,7 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_clim_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -85,38 +85,38 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units nc.variables[dst_varname].field = field # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables['ocean_time'].cycle_length = 365.25 nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/Clim_bio/remap_bio_glodap.py b/examples/cobalt-preproc/Clim_bio/remap_bio_glodap.py index 7a0fd97..d431436 100644 --- a/examples/cobalt-preproc/Clim_bio/remap_bio_glodap.py +++ b/examples/cobalt-preproc/Clim_bio/remap_bio_glodap.py @@ -35,7 +35,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # create 
clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_clim_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -93,7 +93,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -102,24 +102,24 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: @@ -132,7 +132,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= dst_var[kz,:,:] = tmp.copy() # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/Clim_bio/remap_bio_woa.py b/examples/cobalt-preproc/Clim_bio/remap_bio_woa.py index 3c0d3ba..aee0a78 100644 --- a/examples/cobalt-preproc/Clim_bio/remap_bio_woa.py +++ b/examples/cobalt-preproc/Clim_bio/remap_bio_woa.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange @@ -38,7 +38,7 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # create clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_clim_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -47,7 +47,7 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] # correct time to some classic value @@ -100,7 +100,7 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units 
= units @@ -109,24 +109,24 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: @@ -135,11 +135,11 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ if ndim == 3: for kz in np.arange(dst_grd.vgrid.N): tmp = dst_var[kz,:,:].copy() - tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 + tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 dst_var[kz,:,:] = tmp.copy() # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py b/examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py index bd58f86..b95fe5b 100644 --- a/examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py +++ b/examples/cobalt-preproc/Initial_bio/make_ic_file_bio.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np import netCDF4 as nc @@ -28,20 +28,21 @@ -print '\nBuild IC file for time %s' %tag +print('\nBuild IC file for time %s' %tag) for ktr in np.arange(len(list_tracer)): - mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr],'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01.nc'} + mydict = {'tracer':list_tracer[ktr],'longname':tracer_longname[ktr],'units':tracer_units[ktr], \ + 'file':data_dir + 'ocean_cobalt_tracers.1988-2007.01_12.nc', 'nframe':0} remap_bio(mydict, src_grd, dst_grd, dst_dir=dst_dir) ## merge file -ic_file = dst_dir + dst_grd.name + '_ic_bio_GFDL-JAN.nc' -out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[0] + '.nc' +ic_file = dst_dir + dst_grd.name + '_ic_bio_GFDL-JAN.nc' +out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[0] + '.nc' command = ('ncks', '-a', '-O', out_file, ic_file) subprocess.check_call(command) os.remove(out_file) for ktr in np.arange(1,len(list_tracer)): - out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[ktr] + '.nc' + out_file = dst_dir + dst_grd.name + '_ic_bio_' + list_tracer[ktr] + '.nc' command = ('ncks', '-a', '-A', out_file, ic_file) subprocess.check_call(command) os.remove(out_file) diff --git a/examples/cobalt-preproc/Initial_bio/make_ic_file_bio_addons.py b/examples/cobalt-preproc/Initial_bio/make_ic_file_bio_addons.py index e2a5f40..437c82a 100644 --- a/examples/cobalt-preproc/Initial_bio/make_ic_file_bio_addons.py +++ b/examples/cobalt-preproc/Initial_bio/make_ic_file_bio_addons.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import 
commands +import subprocess import numpy as np import pyroms @@ -34,7 +34,7 @@ tracer_units_update_woa = [] for idtra in id_tracer_update_woa: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_woa: # add to tracer update @@ -61,7 +61,7 @@ tracer_units_update_glodap = [] for idtra in id_tracer_update_glodap: - print list_tracer[idtra] + print(list_tracer[idtra]) for idtra in id_tracer_update_glodap: # add to tracer update diff --git a/examples/cobalt-preproc/Initial_bio/remap_bio.py b/examples/cobalt-preproc/Initial_bio/remap_bio.py index 713a1d7..4cf740e 100644 --- a/examples/cobalt-preproc/Initial_bio/remap_bio.py +++ b/examples/cobalt-preproc/Initial_bio/remap_bio.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange @@ -26,6 +26,7 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): src_file = argdict['file'] units = argdict['units'] longname = argdict['longname'] + nframe = argdict['nframe'] # get time nctime.long_name = 'time' @@ -34,7 +35,7 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -43,19 +44,14 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] - tmp = cdf.variables['time'][:] - if len(tmp) > 1: - print 'error : multiple frames in input file' ; exit() - else: - time = tmp[0] + time = cdf.variables['time'][nframe] # to be in sync with physics, add +0.5 day #time = time + 0.5 # time will be given by physics anyway - time = 0. 
#get missing value spval = src_var._FillValue @@ -65,9 +61,9 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # NWGOA3 grid sub-sample if ndim == 3: - src_var = src_var[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] elif ndim == 2: - src_var = src_var[0,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] Bpos = 't' @@ -90,7 +86,7 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -99,31 +95,31 @@ def remap_bio(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/Initial_bio/remap_bio_glodap.py b/examples/cobalt-preproc/Initial_bio/remap_bio_glodap.py index 3b8ef00..bb9242f 100644 --- a/examples/cobalt-preproc/Initial_bio/remap_bio_glodap.py +++ b/examples/cobalt-preproc/Initial_bio/remap_bio_glodap.py @@ -35,7 +35,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # create clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -100,7 +100,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -109,24 +109,24 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = 
pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: @@ -139,7 +139,7 @@ def remap_bio_glodap(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir= dst_var[kz,:,:] = tmp.copy() # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/Initial_bio/remap_bio_woa.py b/examples/cobalt-preproc/Initial_bio/remap_bio_woa.py index 08c4173..5744c66 100644 --- a/examples/cobalt-preproc/Initial_bio/remap_bio_woa.py +++ b/examples/cobalt-preproc/Initial_bio/remap_bio_woa.py @@ -17,7 +17,7 @@ class nctime(object): pass def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./'): - + # NWGOA3 grid sub-sample xrange=src_grd.xrange; yrange=src_grd.yrange @@ -38,7 +38,7 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # create clim file dst_file = tracer + '.nc' dst_file = dst_dir + dst_grd.name + '_ic_bio_' + dst_file - print 'Creating clim file', dst_file + print('Creating clim file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -47,15 +47,11 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ nc = netCDF.Dataset(dst_file, 'a', format='NETCDF3_64BIT') #load var - cdf = netCDF.Dataset(src_file) + cdf = netCDF.Dataset(src_file) src_var = cdf.variables[src_varname] tmp = cdf.variables['time'][nframe] - #if len(tmp) > 1: - # print 'error : multiple frames in input file' ; exit() - #else: - # time = tmp[0] # to be in sync with physics, add +0.5 day #time = time + 0.5 @@ -72,9 +68,9 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # NWGOA3 grid sub-sample if ndim == 3: - src_var = src_var[0,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,:, yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] elif ndim == 2: - src_var = src_var[0,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + src_var = src_var[nframe,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] if tracer == 'no3': @@ -108,7 +104,7 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval2) nc.variables[dst_varname].long_name = long_name nc.variables[dst_varname].units = units @@ -117,24 +113,24 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) if ndim == 3: # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = 
pyroms_toolbox.BGrid_GFDL.flood(src_var, src_grd, Bpos=Bpos, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) else: src_varz = src_var # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, spval=spval) if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz[::-1,:,:], dst_grdz, \ dst_grd, Cpos=Cpos, spval=spval, flood=False) else: @@ -143,11 +139,11 @@ def remap_bio_woa(argdict, src_grd, dst_grd, dmax=0, cdepth=0, kk=0, dst_dir='./ if ndim == 3: for kz in np.arange(dst_grd.vgrid.N): tmp = dst_var[kz,:,:].copy() - tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 + tmp[np.where(dst_grd.hgrid.mask_rho == 0)] = spval2 dst_var[kz,:,:] = tmp.copy() # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables['ocean_time'][0] = time nc.variables[dst_varname][0] = dst_var diff --git a/examples/cobalt-preproc/README b/examples/cobalt-preproc/README new file mode 100644 index 0000000..418b44a --- /dev/null +++ b/examples/cobalt-preproc/README @@ -0,0 +1,10 @@ +Most of these are suitable for a mid-latitude domain with xrange, +yrange. The Arctic had some issues with the tripole source grid, so I +hacked in a tripole option, but there are still remnant pole problems, +needing the fix_pole.py routine. It only matters for the initial +file - the Arctic directory is a copy of Initial_bio. + +The climatology we were given did not contain the coastal diatom +fields (medium phyto). I was told that to create appropriate files, +split the large phyto into large and medium phyto. The scripts in +the diatoms directory do just that. 
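For orientation, here is a minimal sketch of that split for a single tracer (nitrogen), distilled from the split_diatom.py script added below. It assumes a NETCDF3 file that already holds an 'nlg' variable with dimensions ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'); iron (felg/femd), silicon (silg/simd), the boundary file, and the restart file follow the same pattern.

    import sys
    import netCDF4

    # Open a copy of the original file in append mode, halve the large
    # phytoplankton field, and write the same values into a new medium
    # phytoplankton variable.
    nc = netCDF4.Dataset(sys.argv[1], 'a', format='NETCDF3_CLASSIC')
    spval = 1.e+37

    nlg = nc.variables['nlg'][:]
    nlg *= 0.5
    nc.variables['nlg'][:] = nlg

    nc.createVariable('nmd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'),
                      fill_value=spval)
    nc.variables['nmd'].long_name = 'Medium Phytoplankton Nitrogen'
    nc.variables['nmd'].units = 'mol/kg'
    nc.variables['nmd'][:] = nlg

    nc.close()
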
diff --git a/examples/cobalt-preproc/Runoff_bio/make_runoff_bio_file.py b/examples/cobalt-preproc/Runoff_bio/make_runoff_bio_file.py index 810027b..5ada844 100644 --- a/examples/cobalt-preproc/Runoff_bio/make_runoff_bio_file.py +++ b/examples/cobalt-preproc/Runoff_bio/make_runoff_bio_file.py @@ -1,7 +1,7 @@ import subprocess import os import sys -import commands +import subprocess import numpy as np from datetime import * @@ -29,53 +29,53 @@ tracer_longname = [] tracer_units = [] for kt in list_tracer: - tracer_longname.append(kt.lower() + ' river source') - tracer_units.append('mol.m-2.s-1') + tracer_longname.append(kt.lower() + ' river source') + tracer_units.append('mol.m-2.s-1') for ktr in np.arange(len(list_tracer)): - dst_varname = 'river_' + list_tracer[ktr].lower() - # create output file - dst_file = dst_dir + grd.name + '_' + dst_varname + '_runoff_bio_NEWS.nc' - fid_o = nc.Dataset(dst_file, 'w', format='NETCDF3_64BIT') - fid_o.Description = 'ROMS file' - fid_o.Author = 'pyroms_toolbox.nc_create_roms_file' - fid_o.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - fid_o.title = 'ROMS file' + dst_varname = 'river_' + list_tracer[ktr].lower() + # create output file + dst_file = dst_dir + grd.name + '_' + dst_varname + '_runoff_bio_NEWS.nc' + fid_o = nc.Dataset(dst_file, 'w', format='NETCDF3_64BIT') + fid_o.Description = 'ROMS file' + fid_o.Author = 'pyroms_toolbox.nc_create_roms_file' + fid_o.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + fid_o.title = 'ROMS file' + + fid_o.createDimension('xi_rho', np.size(grd.hgrid.mask_rho,1)) + fid_o.createDimension('xi_u', np.size(grd.hgrid.mask_u,1)) + fid_o.createDimension('xi_v', np.size(grd.hgrid.mask_v,1)) + fid_o.createDimension('xi_psi', np.size(grd.hgrid.mask_psi,1)) + fid_o.createDimension('eta_rho', np.size(grd.hgrid.mask_rho,0)) + fid_o.createDimension('eta_u', np.size(grd.hgrid.mask_u,0)) + fid_o.createDimension('eta_v', np.size(grd.hgrid.mask_v,0)) + fid_o.createDimension('eta_psi', np.size(grd.hgrid.mask_psi,0)) + fid_o.createDimension('s_rho', grd.vgrid.N) + fid_o.createDimension('s_w', grd.vgrid.Np) + fid_o.createDimension('runoff_time', None) + fid_o.createVariable('runoff_time', 'f8', ('runoff_time')) + fid_o.variables['runoff_time'].long_name = 'runoff_time' + fid_o.variables['runoff_time'].units = 'days since 1900-01-01 00:00:00' + fid_o.variables['runoff_time'][:] = nctime - fid_o.createDimension('xi_rho', np.size(grd.hgrid.mask_rho,1)) - fid_o.createDimension('xi_u', np.size(grd.hgrid.mask_u,1)) - fid_o.createDimension('xi_v', np.size(grd.hgrid.mask_v,1)) - fid_o.createDimension('xi_psi', np.size(grd.hgrid.mask_psi,1)) - fid_o.createDimension('eta_rho', np.size(grd.hgrid.mask_rho,0)) - fid_o.createDimension('eta_u', np.size(grd.hgrid.mask_u,0)) - fid_o.createDimension('eta_v', np.size(grd.hgrid.mask_v,0)) - fid_o.createDimension('eta_psi', np.size(grd.hgrid.mask_psi,0)) - fid_o.createDimension('s_rho', grd.vgrid.N) - fid_o.createDimension('s_w', grd.vgrid.Np) - fid_o.createDimension('runoff_time', None) - fid_o.createVariable('runoff_time', 'f8', ('runoff_time')) - fid_o.variables['runoff_time'].long_name = 'runoff_time' - fid_o.variables['runoff_time'].units = 'days since 1900-01-01 00:00:00' - fid_o.variables['runoff_time'][:] = nctime + spval = 1.0e+15 + dimensions = ('runoff_time', 'eta_rho', 'xi_rho') + long_name = tracer_longname[ktr] + field = dst_varname + ', scalar, series' + units = tracer_units[ktr] + fid_o.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) + 
fid_o.variables[dst_varname].long_name = long_name + fid_o.variables[dst_varname].units = units + fid_o.variables[dst_varname].field = field + conc = fid_n.variables[list_tracer[ktr]][:] + for kt in np.arange(nt): + print('working on timestep', kt) + runoff = fid_r.variables['Runoff'][kt,:,:] + river_input = conc * runoff / 1000. + fid_o.variables[dst_varname][kt,:,:] = river_input - spval = 1.0e+15 - dimensions = ('runoff_time', 'eta_rho', 'xi_rho') - long_name = tracer_longname[ktr] - field = dst_varname + ', scalar, series' - units = tracer_units[ktr] - fid_o.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) - fid_o.variables[dst_varname].long_name = long_name - fid_o.variables[dst_varname].units = units - fid_o.variables[dst_varname].field = field - conc = fid_n.variables[list_tracer[ktr]][:] - for kt in np.arange(nt): - print 'working on timestep', kt - runoff = fid_r.variables['Runoff'][kt,:,:] - river_input = conc * runoff / 1000. - fid_o.variables[dst_varname][kt,:,:] = river_input - - fid_o.close() + fid_o.close() fid_r.close() fid_n.close() diff --git a/examples/cobalt-preproc/diatoms/split_bdry_diatom.py b/examples/cobalt-preproc/diatoms/split_bdry_diatom.py new file mode 100644 index 0000000..56c96b5 --- /dev/null +++ b/examples/cobalt-preproc/diatoms/split_bdry_diatom.py @@ -0,0 +1,147 @@ +import numpy as np +import netCDF4 +import sys + +# This program splits the large phytoplankton into large and medium. +# True for nitrogen, iron, and silicon. +# Copy the original file before running this, then operate on the copy. +# Could probably do this with ncap2 instead. +# +ncfile = sys.argv[1] +nc = netCDF4.Dataset(ncfile, 'a', format='NETCDF3_CLASSIC') +spval = -10000000000. + +nlg = nc.variables['nlg_north'][:] +nlg *= 0.5 +nc.variables['nlg_north'][:] = nlg + +nc.createVariable('nmd_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['nmd_north'].long_name = 'Medium Phytoplankton Nitrogen north boundary condition' +nc.variables['nmd_north'].units = 'mol/kg' +nc.variables['nmd_north'].time = 'ocean_time' +nc.variables['nmd_north'].field = 'nmd_north, scalar, series' +nc.variables['nmd_north'][:] = nlg + +felg = nc.variables['felg_north'][:] +felg *= 0.5 +nc.variables['felg_north'][:] = felg + +nc.createVariable('femd_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['femd_north'].long_name = 'Medium Phytoplankton Iron north boundary condition' +nc.variables['femd_north'].units = 'mol/kg' +nc.variables['femd_north'].time = 'ocean_time' +nc.variables['femd_north'].field = 'femd_north, scalar, series' +nc.variables['femd_north'][:] = felg + +silg = nc.variables['silg_north'][:] +silg *= 0.5 +nc.variables['silg_north'][:] = silg + +nc.createVariable('simd_north', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['simd_north'].long_name = 'Medium Phytoplankton Silicon north boundary condition' +nc.variables['simd_north'].units = 'mol/kg' +nc.variables['simd_north'].time = 'ocean_time' +nc.variables['simd_north'].field = 'simd_north, scalar, series' +nc.variables['simd_north'][:] = silg + + +nlg = nc.variables['nlg_south'][:] +nlg *= 0.5 +nc.variables['nlg_south'][:] = nlg + +nc.createVariable('nmd_south', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['nmd_south'].long_name = 'Medium Phytoplankton Nitrogen south boundary condition' +nc.variables['nmd_south'].units = 'mol/kg' +nc.variables['nmd_south'].time = 'ocean_time' +nc.variables['nmd_south'].field = 
'nmd_south, scalar, series' +nc.variables['nmd_south'][:] = nlg + +felg = nc.variables['felg_south'][:] +felg *= 0.5 +nc.variables['felg_south'][:] = felg + +nc.createVariable('femd_south', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['femd_south'].long_name = 'Medium Phytoplankton Iron south boundary condition' +nc.variables['femd_south'].units = 'mol/kg' +nc.variables['femd_south'].time = 'ocean_time' +nc.variables['femd_south'].field = 'femd_south, scalar, series' +nc.variables['femd_south'][:] = felg + +silg = nc.variables['silg_south'][:] +silg *= 0.5 +nc.variables['silg_south'][:] = silg + +nc.createVariable('simd_south', 'f8', ('ocean_time', 's_rho', 'xi_rho'), fill_value=spval) +nc.variables['simd_south'].long_name = 'Medium Phytoplankton Silicon south boundary condition' +nc.variables['simd_south'].units = 'mol/kg' +nc.variables['simd_south'].time = 'ocean_time' +nc.variables['simd_south'].field = 'simd_south, scalar, series' +nc.variables['simd_south'][:] = silg + +nlg = nc.variables['nlg_west'][:] +nlg *= 0.5 +nc.variables['nlg_west'][:] = nlg + +nc.createVariable('nmd_west', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['nmd_west'].long_name = 'Medium Phytoplankton Nitrogen west boundary condition' +nc.variables['nmd_west'].units = 'mol/kg' +nc.variables['nmd_west'].time = 'ocean_time' +nc.variables['nmd_west'].field = 'nmd_west, scalar, series' +nc.variables['nmd_west'][:] = nlg + +felg = nc.variables['felg_west'][:] +felg *= 0.5 +nc.variables['felg_west'][:] = felg + +nc.createVariable('femd_west', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['femd_west'].long_name = 'Medium Phytoplankton Iron west boundary condition' +nc.variables['femd_west'].units = 'mol/kg' +nc.variables['femd_west'].time = 'ocean_time' +nc.variables['femd_west'].field = 'femd_west, scalar, series' +nc.variables['femd_west'][:] = felg + +silg = nc.variables['silg_west'][:] +silg *= 0.5 +nc.variables['silg_west'][:] = silg + +nc.createVariable('simd_west', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['simd_west'].long_name = 'Medium Phytoplankton Silicon west boundary condition' +nc.variables['simd_west'].units = 'mol/kg' +nc.variables['simd_west'].time = 'ocean_time' +nc.variables['simd_west'].field = 'simd_west, scalar, series' +nc.variables['simd_west'][:] = silg + +nlg = nc.variables['nlg_east'][:] +nlg *= 0.5 +nc.variables['nlg_east'][:] = nlg + +nc.createVariable('nmd_east', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['nmd_east'].long_name = 'Medium Phytoplankton Nitrogen east boundary condition' +nc.variables['nmd_east'].units = 'mol/kg' +nc.variables['nmd_east'].time = 'ocean_time' +nc.variables['nmd_east'].field = 'nmd_east, scalar, series' +nc.variables['nmd_east'][:] = nlg + +felg = nc.variables['felg_east'][:] +felg *= 0.5 +nc.variables['felg_east'][:] = felg + +nc.createVariable('femd_east', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['femd_east'].long_name = 'Medium Phytoplankton Iron east boundary condition' +nc.variables['femd_east'].units = 'mol/kg' +nc.variables['femd_east'].time = 'ocean_time' +nc.variables['femd_east'].field = 'femd_east, scalar, series' +nc.variables['femd_east'][:] = felg + +silg = nc.variables['silg_east'][:] +silg *= 0.5 +nc.variables['silg_east'][:] = silg + +nc.createVariable('simd_east', 'f8', ('ocean_time', 's_rho', 'eta_rho'), fill_value=spval) +nc.variables['simd_east'].long_name = 'Medium 
Phytoplankton Silicon east boundary condition' +nc.variables['simd_east'].units = 'mol/kg' +nc.variables['simd_east'].time = 'ocean_time' +nc.variables['simd_east'].field = 'simd_east, scalar, series' +nc.variables['simd_east'][:] = silg + +nc.close() diff --git a/examples/cobalt-preproc/diatoms/split_diatom.py b/examples/cobalt-preproc/diatoms/split_diatom.py new file mode 100644 index 0000000..a1fd080 --- /dev/null +++ b/examples/cobalt-preproc/diatoms/split_diatom.py @@ -0,0 +1,73 @@ +import numpy as np +import netCDF4 +import sys + +# This program splits the large phytoplankton into large and medium. +# True for nitrogen, iron, and silicon. +# Copy the original file before running this, then operate on the copy. +# Could probably do this with ncap2 instead. +# +ncfile = sys.argv[1] +nc = netCDF4.Dataset(ncfile, 'a', format='NETCDF3_CLASSIC') +spval = 1.e+37 + +nlg = nc.variables['nlg'][:] +nlg *= 0.5 +nc.variables['nlg'][:] = nlg + +nc.createVariable('nmd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +#nc.createVariable('nmd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['nmd'].long_name = 'Medium Phytoplankton Nitrogen' +nc.variables['nmd'].units = 'mol/kg' +nc.variables['nmd'].time = 'ocean_time' +nc.variables['nmd'].field = 'nmd, scalar, series' +nc.variables['nmd'].grid = 'grid' +nc.variables['nmd'].location = 'face' +nc.variables['nmd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['nmd'][:] = nlg + +felg = nc.variables['felg'][:] +felg *= 0.5 +nc.variables['felg'][:] = felg + +nc.createVariable('femd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +#nc.createVariable('femd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['femd'].long_name = 'Medium Phytoplankton Iron' +nc.variables['femd'].units = 'mol/kg' +nc.variables['femd'].time = 'ocean_time' +nc.variables['femd'].field = 'femd, scalar, series' +nc.variables['femd'].grid = 'grid' +nc.variables['femd'].location = 'face' +nc.variables['femd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['femd'][:] = felg + +silg = nc.variables['silg'][:] +silg *= 0.5 +nc.variables['silg'][:] = silg + +nc.createVariable('simd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +#nc.createVariable('simd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['simd'].long_name = 'Medium Phytoplankton Silicon' +nc.variables['simd'].units = 'mol/kg' +nc.variables['simd'].time = 'ocean_time' +nc.variables['simd'].field = 'simd, scalar, series' +nc.variables['simd'].grid = 'grid' +nc.variables['simd'].location = 'face' +nc.variables['simd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['simd'][:] = silg + +mu_mem_lg = nc.variables['mu_mem_lg'][:] +mu_mem_lg *= 0.5 +nc.variables['mu_mem_lg'][:] = mu_mem_lg + +nc.createVariable('mu_mem_md', 'f8', ('two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['mu_mem_md'].long_name = 'Medium Phytoplankton Silicon' +nc.variables['mu_mem_md'].units = 'mol/kg' +nc.variables['mu_mem_md'].time = 'ocean_time' +nc.variables['mu_mem_md'].field = 'mu_mem_md, scalar, series' +nc.variables['mu_mem_md'].grid = 'grid' +nc.variables['mu_mem_md'].location = 'face' +nc.variables['mu_mem_md'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['mu_mem_md'][:] = mu_mem_lg + +nc.close() diff --git a/examples/cobalt-preproc/diatoms/split_restart_diatom.py 
b/examples/cobalt-preproc/diatoms/split_restart_diatom.py new file mode 100644 index 0000000..0874b02 --- /dev/null +++ b/examples/cobalt-preproc/diatoms/split_restart_diatom.py @@ -0,0 +1,73 @@ +import numpy as np +import netCDF4 +import sys + +# This program splits the large phytoplankton into large and medium. +# True for nitrogen, iron, and silicon. +# Copy the original file before running this, then operate on the copy. +# Could probably do this with ncap2 instead. +# +ncfile = sys.argv[1] +nc = netCDF4.Dataset(ncfile, 'a', format='NETCDF3_CLASSIC') +spval = 1.e+37 + +nlg = nc.variables['nlg'][:] +nlg *= 0.5 +nc.variables['nlg'][:] = nlg + +#nc.createVariable('nmd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.createVariable('nmd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['nmd'].long_name = 'Medium Phytoplankton Nitrogen' +nc.variables['nmd'].units = 'mol/kg' +nc.variables['nmd'].time = 'ocean_time' +nc.variables['nmd'].field = 'nmd, scalar, series' +nc.variables['nmd'].grid = 'grid' +nc.variables['nmd'].location = 'face' +nc.variables['nmd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['nmd'][:] = nlg + +felg = nc.variables['felg'][:] +felg *= 0.5 +nc.variables['felg'][:] = felg + +#nc.createVariable('femd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.createVariable('femd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['femd'].long_name = 'Medium Phytoplankton Iron' +nc.variables['femd'].units = 'mol/kg' +nc.variables['femd'].time = 'ocean_time' +nc.variables['femd'].field = 'femd, scalar, series' +nc.variables['femd'].grid = 'grid' +nc.variables['femd'].location = 'face' +nc.variables['femd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['femd'][:] = felg + +silg = nc.variables['silg'][:] +silg *= 0.5 +nc.variables['silg'][:] = silg + +#nc.createVariable('simd', 'f8', ('ocean_time', 'two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.createVariable('simd', 'f8', ('ocean_time', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['simd'].long_name = 'Medium Phytoplankton Silicon' +nc.variables['simd'].units = 'mol/kg' +nc.variables['simd'].time = 'ocean_time' +nc.variables['simd'].field = 'simd, scalar, series' +nc.variables['simd'].grid = 'grid' +nc.variables['simd'].location = 'face' +nc.variables['simd'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['simd'][:] = silg + +mu_mem_lg = nc.variables['mu_mem_lg'][:] +mu_mem_lg *= 0.5 +nc.variables['mu_mem_lg'][:] = mu_mem_lg + +nc.createVariable('mu_mem_md', 'f8', ('two', 's_rho', 'eta_rho', 'xi_rho'), fill_value=spval) +nc.variables['mu_mem_md'].long_name = 'Medium Phytoplankton Silicon' +nc.variables['mu_mem_md'].units = 'mol/kg' +nc.variables['mu_mem_md'].time = 'ocean_time' +nc.variables['mu_mem_md'].field = 'mu_mem_md, scalar, series' +nc.variables['mu_mem_md'].grid = 'grid' +nc.variables['mu_mem_md'].location = 'face' +nc.variables['mu_mem_md'].coordinates = 'lon_rho lat_rho s_rho ocean_time' +nc.variables['mu_mem_md'][:] = mu_mem_lg + +nc.close() diff --git a/examples/cobalt-preproc/iron_coastal/create_zero_iron_coastal_file.py b/examples/cobalt-preproc/iron_coastal/create_zero_iron_coastal_file.py index 14fe0c8..8a3f333 100644 --- a/examples/cobalt-preproc/iron_coastal/create_zero_iron_coastal_file.py +++ b/examples/cobalt-preproc/iron_coastal/create_zero_iron_coastal_file.py @@ -8,114 +8,114 @@ class 
iron_coastal(): - def __init__(self,domain): - self.grd = pyroms.grid.get_ROMS_grid(domain) - self.fileout = 'iron_nofecoast_' + domain + '.nc' - self.ny, self.nx = self.grd.hgrid.lon_rho.shape - self.spval = 1.0e+15 - return None - - def __call__(self,mom_grid_file): - self.maskcoast = self.create_mask_coast_MOM(mom_grid_file) - if self.grd.name == 'NWGOA3': - self.ana_iron_ccs1() - else: - print 'domain not supported' ; pass - self.write_nc_file(self) - return None - - def create_mask_coast_MOM(self,mom_grid_file): - fidmask = nc.Dataset(mom_grid_file,'r') - kmt = fidmask.variables['kmt'][:].squeeze() - lon_mom = fidmask.variables['geolon_t'][:].squeeze() - lat_mom = fidmask.variables['geolat_t'][:].squeeze() - fidmask.close() - - if self.grd.name == 'NWGOA3': - lon_mom = lon_mom + 360. - - mask = np.zeros(kmt.shape) - mask[np.where(kmt.mask)] = 1 - #plt.figure() ; plt.pcolormesh(mask) ; plt.colorbar() - - mask_reduced = morph.binary_erosion(mask, structure=None, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) - mask_extended = morph.binary_dilation(mask, structure=None, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) - mask_coast = mask_extended - mask_reduced - #plt.figure() ; plt.pcolormesh(mask_coast) ; plt.colorbar() - - # interpolate mask to target grid - nymom, nxmom = lon_mom.shape - - sizetot_ini = nymom * nxmom - position_ini = np.zeros((sizetot_ini ,2)) - - a = lon_mom.flat - b = lat_mom.flat - - for k in np.arange(sizetot_ini): - position_ini[k,0] = a[k] - position_ini[k,1] = b[k] - - sizetot_final = self.grd.hgrid.lon_rho.shape[0]*self.grd.hgrid.lon_rho.shape[1] - position_final = np.zeros((sizetot_final ,2)) - - c = self.grd.hgrid.lon_rho.flat - d = self.grd.hgrid.lat_rho.flat - - for k in np.arange(sizetot_final): - position_final[k,0] = c[k] - position_final[k,1] = d[k] - - tmp = scipyint.griddata(position_ini,mask_coast.flat,position_final,method='nearest') - mask_coast_interp = np.reshape(tmp,self.grd.hgrid.lon_rho.shape) - - #plt.figure() ; plt.pcolormesh(mask_coast_interp) ; plt.colorbar() ; plt.show() - return mask_coast_interp - - def ana_iron_ccs1(self): - h = self.grd.vgrid.h - one_fe_coast = h.copy() - # set to constant value everywhere - one_fe_coast = 0.0e-11 # mol Fe m kg-1 s-1 - # mask the array - one_fe_coast = one_fe_coast * self.maskcoast * self.grd.hgrid.mask_rho - one_fe_coast[np.where( self.grd.hgrid.mask_rho == 0)] = self.spval - #plt.figure() ; plt.pcolormesh(one_fe_coast) ; plt.colorbar() ; plt.show() - # - self.fe_coast = np.zeros((12,self.ny,self.nx)) - for kt in np.arange(12): - self.fe_coast[kt,:,:] = one_fe_coast[:,:] - return None - - def write_nc_file(self,fileout): - fid = nc.Dataset(self.fileout, 'w', format='NETCDF3_CLASSIC') - fid.description = 'Iron coastal source file (raphael@esm.rutgers.edu)' - # dimensions - fid.createDimension('lat', self.ny) - fid.createDimension('lon', self.nx) - fid.createDimension('fecoast_time', None) - # variables - latitudes = fid.createVariable('lat', 'f4', ('lat','lon',)) - longitudes = fid.createVariable('lon', 'f4', ('lat','lon',)) - times = fid.createVariable('fecoast_time', 'f4', ('fecoast_time',)) - times.units = "days since 1900-01-01 00:00" - times.cycle_length = 365.25 - - variable = fid.createVariable('fecoast', 'f8', ('fecoast_time','lat','lon',),fill_value=self.spval) - variable.coordinates="lon lat fecoast_time" - variable.missing_value = 1e+15 - variable.time ="fecoast_time" - variable.units = "mol Fe m kg-1 s-1" - 
variable.long_name = "iron coastal source" - - # data - latitudes[:,:] = self.grd.hgrid.lat_rho - longitudes[:,:] = self.grd.hgrid.lon_rho - times[:] = [15.5, 45, 74.5, 105, 135.5, 166, 196.5, 227.5, 258, 288.5, 319, 349.5] - variable[:,:,:] = self.fe_coast - # close - fid.close() - return None + def __init__(self,domain): + self.grd = pyroms.grid.get_ROMS_grid(domain) + self.fileout = 'iron_nofecoast_' + domain + '.nc' + self.ny, self.nx = self.grd.hgrid.lon_rho.shape + self.spval = 1.0e+15 + return None + + def __call__(self,mom_grid_file): + self.maskcoast = self.create_mask_coast_MOM(mom_grid_file) + if self.grd.name == 'NWGOA3': + self.ana_iron_ccs1() + else: + print('domain not supported') ; pass + self.write_nc_file(self) + return None + + def create_mask_coast_MOM(self,mom_grid_file): + fidmask = nc.Dataset(mom_grid_file,'r') + kmt = fidmask.variables['kmt'][:].squeeze() + lon_mom = fidmask.variables['geolon_t'][:].squeeze() + lat_mom = fidmask.variables['geolat_t'][:].squeeze() + fidmask.close() + + if self.grd.name == 'NWGOA3': + lon_mom = lon_mom + 360. + + mask = np.zeros(kmt.shape) + mask[np.where(kmt.mask)] = 1 + #plt.figure() ; plt.pcolormesh(mask) ; plt.colorbar() + + mask_reduced = morph.binary_erosion(mask, structure=None, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) + mask_extended = morph.binary_dilation(mask, structure=None, iterations=1, mask=None, output=None, border_value=0, origin=0, brute_force=False) + mask_coast = mask_extended - mask_reduced + #plt.figure() ; plt.pcolormesh(mask_coast) ; plt.colorbar() + + # interpolate mask to target grid + nymom, nxmom = lon_mom.shape + + sizetot_ini = nymom * nxmom + position_ini = np.zeros((sizetot_ini ,2)) + + a = lon_mom.flat + b = lat_mom.flat + + for k in np.arange(sizetot_ini): + position_ini[k,0] = a[k] + position_ini[k,1] = b[k] + + sizetot_final = self.grd.hgrid.lon_rho.shape[0]*self.grd.hgrid.lon_rho.shape[1] + position_final = np.zeros((sizetot_final ,2)) + + c = self.grd.hgrid.lon_rho.flat + d = self.grd.hgrid.lat_rho.flat + + for k in np.arange(sizetot_final): + position_final[k,0] = c[k] + position_final[k,1] = d[k] + + tmp = scipyint.griddata(position_ini,mask_coast.flat,position_final,method='nearest') + mask_coast_interp = np.reshape(tmp,self.grd.hgrid.lon_rho.shape) + + #plt.figure() ; plt.pcolormesh(mask_coast_interp) ; plt.colorbar() ; plt.show() + return mask_coast_interp + + def ana_iron_ccs1(self): + h = self.grd.vgrid.h + one_fe_coast = h.copy() + # set to constant value everywhere + one_fe_coast = 0.0e-11 # mol Fe m kg-1 s-1 + # mask the array + one_fe_coast = one_fe_coast * self.maskcoast * self.grd.hgrid.mask_rho + one_fe_coast[np.where( self.grd.hgrid.mask_rho == 0)] = self.spval + #plt.figure() ; plt.pcolormesh(one_fe_coast) ; plt.colorbar() ; plt.show() + # + self.fe_coast = np.zeros((12,self.ny,self.nx)) + for kt in np.arange(12): + self.fe_coast[kt,:,:] = one_fe_coast[:,:] + return None + + def write_nc_file(self,fileout): + fid = nc.Dataset(self.fileout, 'w', format='NETCDF3_CLASSIC') + fid.description = 'Iron coastal source file (raphael@esm.rutgers.edu)' + # dimensions + fid.createDimension('lat', self.ny) + fid.createDimension('lon', self.nx) + fid.createDimension('fecoast_time', None) + # variables + latitudes = fid.createVariable('lat', 'f4', ('lat','lon',)) + longitudes = fid.createVariable('lon', 'f4', ('lat','lon',)) + times = fid.createVariable('fecoast_time', 'f4', ('fecoast_time',)) + times.units = "days since 1900-01-01 00:00" + 
times.cycle_length = 365.25 + + variable = fid.createVariable('fecoast', 'f8', ('fecoast_time','lat','lon',),fill_value=self.spval) + variable.coordinates="lon lat fecoast_time" + variable.missing_value = 1e+15 + variable.time ="fecoast_time" + variable.units = "mol Fe m kg-1 s-1" + variable.long_name = "iron coastal source" + + # data + latitudes[:,:] = self.grd.hgrid.lat_rho + longitudes[:,:] = self.grd.hgrid.lon_rho + times[:] = [15.5, 45, 74.5, 105, 135.5, 166, 196.5, 227.5, 258, 288.5, 319, 349.5] + variable[:,:,:] = self.fe_coast + # close + fid.close() + return None #------------------------------------------------------------------------------- # iron coastal source for NWGOA3 diff --git a/examples/cobalt-preproc/iron_sediment/create_iron_sediment_file.py b/examples/cobalt-preproc/iron_sediment/create_iron_sediment_file.py index 0e141c5..c3cf0a2 100644 --- a/examples/cobalt-preproc/iron_sediment/create_iron_sediment_file.py +++ b/examples/cobalt-preproc/iron_sediment/create_iron_sediment_file.py @@ -4,71 +4,69 @@ class iron_sediment(): - def __init__(self,domain): - self.grd = pyroms.grid.get_ROMS_grid(domain) - self.fileout = 'iron_sediment_' + domain + '.nc' - self.ny, self.nx = self.grd.hgrid.lon_rho.shape - return None + def __init__(self,domain): + self.grd = pyroms.grid.get_ROMS_grid(domain) + self.fileout = 'iron_sediment_' + domain + '.nc' + self.ny, self.nx = self.grd.hgrid.lon_rho.shape + return None - def __call__(self): - if self.grd.name == 'NWGOA3': - self.ana_iron_ccs1() - else: - print 'domain not supported' ; pass - self.write_nc_file(self) - return None + def __call__(self): + if self.grd.name == 'NWGOA3': + self.ana_iron_ccs1() + else: + print('domain not supported') ; pass + self.write_nc_file(self) + return None - def ana_iron_ccs1(self): - h = self.grd.vgrid.h - iron_sed = h.copy() - # set to constant value everywhere - # jk moore et al., global BGC cycles, 2004 - # suggest a value of 5 micromol/m2/day for california shelf - iron_sed[:,:] = 0e-6 / 86400. # mol/m2/s -# iron_sed[:,:] = 5e-6 / 86400. # mol/m2/s - # set to zero deeper than cutoff depth - iron_sed[npy.where( h > 1100. ) ] = 0. - # mask the array - iron_sed = iron_sed * self.grd.hgrid.mask_rho - # - self.iron_flx = npy.zeros((12,self.ny,self.nx)) - print self.iron_flx.shape - for kt in npy.arange(12): - self.iron_flx[kt,:,:] = iron_sed[:,:] - return None + def ana_iron_ccs1(self): + h = self.grd.vgrid.h + iron_sed = h.copy() + # set to constant value everywhere + # jk moore et al., global BGC cycles, 2004 + # suggest a value of 5 micromol/m2/day for california shelf + iron_sed[:,:] = 0e-6 / 86400. # mol/m2/s +# iron_sed[:,:] = 5e-6 / 86400. # mol/m2/s + # set to zero deeper than cutoff depth + iron_sed[npy.where( h > 1100. ) ] = 0. 
+ # mask the array + iron_sed = iron_sed * self.grd.hgrid.mask_rho + # + self.iron_flx = npy.zeros((12,self.ny,self.nx)) + print(self.iron_flx.shape) + for kt in npy.arange(12): + self.iron_flx[kt,:,:] = iron_sed[:,:] + return None - def write_nc_file(self,fileout): - fid = nc.Dataset(self.fileout, 'w', format='NETCDF3_CLASSIC') - fid.description = 'Iron flux from sediment file (raphael@esm.rutgers.edu)' - # dimensions - fid.createDimension('lat', self.ny) - fid.createDimension('lon', self.nx) - fid.createDimension('ironsed_time', None) - # variables - latitudes = fid.createVariable('lat', 'f4', ('lat','lon',)) - longitudes = fid.createVariable('lon', 'f4', ('lat','lon',)) - times = fid.createVariable('ironsed_time', 'f4', ('ironsed_time',)) - times.units = "days since 1900-01-01 00:00" - times.cycle_length = 365.25 - - variable = fid.createVariable('ironsed', 'f8', ('ironsed_time','lat','lon',)) - variable.coordinates="lon lat ironsed" - variable.missing_value = 1e+15 - variable.time ="ironsed_time" - variable.units = "mol/m2/s" - variable.long_name = "iron flux from sediments" - - # data - latitudes[:,:] = self.grd.hgrid.lat_rho - longitudes[:,:] = self.grd.hgrid.lon_rho - times[:] = [15.5, 45, 74.5, 105, 135.5, 166, 196.5, 227.5, 258, 288.5, 319, 349.5] - variable[:,:,:] = self.iron_flx - # close - fid.close() - return None + def write_nc_file(self,fileout): + fid = nc.Dataset(self.fileout, 'w', format='NETCDF3_CLASSIC') + fid.description = 'Iron flux from sediment file (raphael@esm.rutgers.edu)' + # dimensions + fid.createDimension('lat', self.ny) + fid.createDimension('lon', self.nx) + fid.createDimension('ironsed_time', None) + # variables + latitudes = fid.createVariable('lat', 'f4', ('lat','lon',)) + longitudes = fid.createVariable('lon', 'f4', ('lat','lon',)) + times = fid.createVariable('ironsed_time', 'f4', ('ironsed_time',)) + times.units = "days since 1900-01-01 00:00" + times.cycle_length = 365.25 + variable = fid.createVariable('ironsed', 'f8', ('ironsed_time','lat','lon',)) + variable.coordinates="lon lat ironsed" + variable.missing_value = 1e+15 + variable.time ="ironsed_time" + variable.units = "mol/m2/s" + variable.long_name = "iron flux from sediments" + # data + latitudes[:,:] = self.grd.hgrid.lat_rho + longitudes[:,:] = self.grd.hgrid.lon_rho + times[:] = [15.5, 45, 74.5, 105, 135.5, 166, 196.5, 227.5, 258, 288.5, 319, 349.5] + variable[:,:,:] = self.iron_flx + # close + fid.close() + return None #------------------------------------------------------------------------------- diff --git a/examples/cobalt-preproc/nudging_coef/lib_nudgcoef.py b/examples/cobalt-preproc/nudging_coef/lib_nudgcoef.py index 85f2c2e..202126e 100644 --- a/examples/cobalt-preproc/nudging_coef/lib_nudgcoef.py +++ b/examples/cobalt-preproc/nudging_coef/lib_nudgcoef.py @@ -3,181 +3,181 @@ import pyroms class nudgcoef(): - ''' A class to write the Nudging coeficient file for ROMS ''' - - def __init__(self,roms_grid): - ''' init an object of the class with the pyroms grid ID ''' - self.grd = pyroms.grid.get_ROMS_grid(roms_grid) - return None - - def __call__(self,east_dict,west_dict,north_dict,south_dict,tracer_timescales,foutname='./nudging_coef.nc'): - ''' call with following dictionaries : - 4 boundaries dict + tracer timescales - for example : - east_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} - west_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} - north_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} - south_dict = 
{'nudge':True,'factor': 1,'width':50,'transition':'linear'} - tracer_timescales = {'M2':30,'M3':30,'temp':30,'salt':30,'tracer':30} - tips: - * nudge = True if open boundary, False otherwise - * factor allows to have different timescales at each boundary - * width is in grid points - * transition shapes how timescale varies spatially - * tracer timescales are in days - ''' - self.east_dict = east_dict - self.west_dict = west_dict - self.north_dict = north_dict - self.south_dict = south_dict - self.tra_ts = tracer_timescales - self.foutname = foutname - # create 2d coef - self.nud2 = self._create_nudgcoef_2d() - # create 3d coef - self.nud3 = self._create_nudgcoef_3d() - # write to netcdf - self._write_nc_file() - return None - - def _create_nudgcoef_3d(self): - ''' expand 2d coef along the vertical ''' - # RD: later we could imagine multiplying by - # a vertical profile if needed - ny, nx = self.grd.hgrid.mask_rho.shape - nz = self.grd.vgrid.N - nudgcoef = npy.zeros((nz,ny,nx)) - for kz in npy.arange(nz): - nudgcoef[kz,:,:] = self.nud2[:,:] - return nudgcoef - - def _create_nudgcoef_2d(self): - ''' create the 2d nudging coef from dictionaries ''' - ny, nx = self.grd.hgrid.mask_rho.shape - nudgcoef_west = npy.zeros((ny,nx)) - nudgcoef_east = npy.zeros((ny,nx)) - nudgcoef_north = npy.zeros((ny,nx)) - nudgcoef_south = npy.zeros((ny,nx)) - nudgcoef = npy.zeros((ny,nx)) - mask = self.grd.hgrid.mask_rho - # west boundary - if self.west_dict['nudge'] is True: - fc = self.west_dict['factor'] - wd = self.west_dict['width'] - tr = self.west_dict['transition'] - if tr == 'linear': - for ji in npy.arange(0,wd): - nudgcoef_west[:,ji] = fc * (wd-ji) / float(wd) - elif tr == 'linear_nocoast': - for ji in npy.arange(0,wd): - nudgcoef_west[:,ji] = mask[:,0] * fc * (wd-ji) / float(wd) - else: - print 'transition not coded' ; pass - # east boundary - if self.east_dict['nudge'] is True: - fc = self.east_dict['factor'] - wd = self.east_dict['width'] - tr = self.east_dict['transition'] - if tr == 'linear': - for ji in npy.arange(nx-wd,nx): - nudgcoef_east[:,ji] = fc * (wd-nx+ji) / float(wd) - elif tr == 'linear_nocoast': - for ji in npy.arange(nx-wd,nx): - nudgcoef_east[:,ji] = mask[:,-1] * fc * (wd-nx+ji) / float(wd) - else: - print 'transition not coded' ; pass - # south boundary - if self.south_dict['nudge'] is True: - fc = self.south_dict['factor'] - wd = self.south_dict['width'] - tr = self.south_dict['transition'] - if tr == 'linear': - for jj in npy.arange(0,wd): - nudgcoef_south[jj,:] = fc * (wd-jj) / float(wd) - if tr == 'linear_nocoast': - for jj in npy.arange(0,wd): - nudgcoef_south[jj,:] = mask[0,:] * fc * (wd-jj) / float(wd) - else: - print 'transition not coded' ; pass - # north boundary - if self.north_dict['nudge'] is True: - fc = self.north_dict['factor'] - wd = self.north_dict['width'] - tr = self.north_dict['transition'] - if tr == 'linear': - for jj in npy.arange(ny-wd,ny): - nudgcoef_south[jj,:] = fc * (wd-ny+jj) / float(wd) - if tr == 'linear_nocoast': - for jj in npy.arange(ny-wd,ny): - nudgcoef_south[jj,:] = mask[-1,:] * fc * (wd-ny+jj) / float(wd) - else: - print 'transition not coded' ; pass - - - # create the total coefficient by combining all 4 fields - # the max functions is useful to make nice corners when - # individual field overlap - # maybe not the most efficient but short and readable - for jj in npy.arange(ny): - for ji in npy.arange(nx): - nudgcoef[jj,ji] = max(nudgcoef_west[jj,ji], \ - nudgcoef_east[jj,ji],nudgcoef_north[jj,ji],nudgcoef_south[jj,ji]) - return nudgcoef - - 
def _write_nc_file(self): - ''' writing to netcdf and multiplying by inverse timescales ''' - ncfile = self.foutname - fid = nc.Dataset(ncfile, 'w', format='NETCDF3_CLASSIC') - - # dimensions - fid.createDimension('xi_rho', npy.size(self.grd.hgrid.mask_rho,1)) - fid.createDimension('eta_rho', npy.size(self.grd.hgrid.mask_rho,0)) - fid.createDimension('s_rho', self.grd.vgrid.N) - fid.createDimension('s_w', self.grd.vgrid.Np) - fid.description = 'Nudging coefficients for grid' + self.grd.name - # vertical coordinate - fid.createVariable('s_rho', 'f8', ('s_rho')) - fid.variables['s_rho'].long_name = 'S-coordinate at RHO-points' - fid.variables['s_rho'].valid_min = '-1' - fid.variables['s_rho'].valid_max = '0' - fid.variables['s_rho'].field = 's_rho,scalar' - fid.variables['s_rho'][:] = self.grd.vgrid.s_rho - - # variables - O_M2_NudgeCoef = fid.createVariable('M2_NudgeCoef', 'f8', ('eta_rho','xi_rho',)) - O_M3_NudgeCoef = fid.createVariable('M3_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) - O_temp_NudgeCoef = fid.createVariable('temp_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) - O_salt_NudgeCoef = fid.createVariable('salt_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) - O_tracer_NudgeCoef = fid.createVariable('tracer_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) - # data - O_M2_NudgeCoef[:,:] = (1./self.tra_ts['M2']) * self.nud2 - O_M3_NudgeCoef[:,:,:] = (1./self.tra_ts['M3']) * self.nud3 - O_temp_NudgeCoef[:,:,:] = (1./self.tra_ts['temp']) * self.nud3 - O_salt_NudgeCoef[:,:,:] = (1./self.tra_ts['salt']) * self.nud3 - O_tracer_NudgeCoef[:,:,:] = (1./self.tra_ts['tracer']) * self.nud3 - # attributes - O_M2_NudgeCoef.long_name = '2D momentum inverse nudging coefficients' - O_M2_NudgeCoef.units = 'days-1' - O_M2_NudgeCoef.coordinates = 'xi_rho eta_rho' - - O_M3_NudgeCoef.long_name = '3D momentum inverse nudging coefficients' - O_M3_NudgeCoef.units = 'days-1' - O_M3_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' - - O_temp_NudgeCoef.long_name = 'temp inverse nudging coefficients' - O_temp_NudgeCoef.units = 'days-1' - O_temp_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' - - O_salt_NudgeCoef.long_name = 'salt inverse nudging coefficients' - O_salt_NudgeCoef.units = 'days-1' - O_salt_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' - - O_tracer_NudgeCoef.long_name = 'generic tracer inverse nudging coefficients' - O_tracer_NudgeCoef.units = 'days-1' - O_tracer_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' - # close - fid.close() - return None + ''' A class to write the Nudging coeficient file for ROMS ''' + + def __init__(self,roms_grid): + ''' init an object of the class with the pyroms grid ID ''' + self.grd = pyroms.grid.get_ROMS_grid(roms_grid) + return None + + def __call__(self,east_dict,west_dict,north_dict,south_dict,tracer_timescales,foutname='./nudging_coef.nc'): + ''' call with following dictionaries : + 4 boundaries dict + tracer timescales + for example : + east_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} + west_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} + north_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} + south_dict = {'nudge':True,'factor': 1,'width':50,'transition':'linear'} + tracer_timescales = {'M2':30,'M3':30,'temp':30,'salt':30,'tracer':30} + tips: + * nudge = True if open boundary, False otherwise + * factor allows to have different timescales at each boundary + * width is in grid points + * transition shapes how timescale varies spatially + * tracer timescales are in days + ''' + self.east_dict 
= east_dict + self.west_dict = west_dict + self.north_dict = north_dict + self.south_dict = south_dict + self.tra_ts = tracer_timescales + self.foutname = foutname + # create 2d coef + self.nud2 = self._create_nudgcoef_2d() + # create 3d coef + self.nud3 = self._create_nudgcoef_3d() + # write to netcdf + self._write_nc_file() + return None + + def _create_nudgcoef_3d(self): + ''' expand 2d coef along the vertical ''' + # RD: later we could imagine multiplying by + # a vertical profile if needed + ny, nx = self.grd.hgrid.mask_rho.shape + nz = self.grd.vgrid.N + nudgcoef = npy.zeros((nz,ny,nx)) + for kz in npy.arange(nz): + nudgcoef[kz,:,:] = self.nud2[:,:] + return nudgcoef + + def _create_nudgcoef_2d(self): + ''' create the 2d nudging coef from dictionaries ''' + ny, nx = self.grd.hgrid.mask_rho.shape + nudgcoef_west = npy.zeros((ny,nx)) + nudgcoef_east = npy.zeros((ny,nx)) + nudgcoef_north = npy.zeros((ny,nx)) + nudgcoef_south = npy.zeros((ny,nx)) + nudgcoef = npy.zeros((ny,nx)) + mask = self.grd.hgrid.mask_rho + # west boundary + if self.west_dict['nudge'] is True: + fc = self.west_dict['factor'] + wd = self.west_dict['width'] + tr = self.west_dict['transition'] + if tr == 'linear': + for ji in npy.arange(0,wd): + nudgcoef_west[:,ji] = fc * (wd-ji) / float(wd) + elif tr == 'linear_nocoast': + for ji in npy.arange(0,wd): + nudgcoef_west[:,ji] = mask[:,0] * fc * (wd-ji) / float(wd) + else: + print('transition not coded') ; pass + # east boundary + if self.east_dict['nudge'] is True: + fc = self.east_dict['factor'] + wd = self.east_dict['width'] + tr = self.east_dict['transition'] + if tr == 'linear': + for ji in npy.arange(nx-wd,nx): + nudgcoef_east[:,ji] = fc * (wd-nx+ji) / float(wd) + elif tr == 'linear_nocoast': + for ji in npy.arange(nx-wd,nx): + nudgcoef_east[:,ji] = mask[:,-1] * fc * (wd-nx+ji) / float(wd) + else: + print('transition not coded') ; pass + # south boundary + if self.south_dict['nudge'] is True: + fc = self.south_dict['factor'] + wd = self.south_dict['width'] + tr = self.south_dict['transition'] + if tr == 'linear': + for jj in npy.arange(0,wd): + nudgcoef_south[jj,:] = fc * (wd-jj) / float(wd) + if tr == 'linear_nocoast': + for jj in npy.arange(0,wd): + nudgcoef_south[jj,:] = mask[0,:] * fc * (wd-jj) / float(wd) + else: + print('transition not coded') ; pass + # north boundary + if self.north_dict['nudge'] is True: + fc = self.north_dict['factor'] + wd = self.north_dict['width'] + tr = self.north_dict['transition'] + if tr == 'linear': + for jj in npy.arange(ny-wd,ny): + nudgcoef_south[jj,:] = fc * (wd-ny+jj) / float(wd) + if tr == 'linear_nocoast': + for jj in npy.arange(ny-wd,ny): + nudgcoef_south[jj,:] = mask[-1,:] * fc * (wd-ny+jj) / float(wd) + else: + print('transition not coded') ; pass + + + # create the total coefficient by combining all 4 fields + # the max functions is useful to make nice corners when + # individual field overlap + # maybe not the most efficient but short and readable + for jj in npy.arange(ny): + for ji in npy.arange(nx): + nudgcoef[jj,ji] = max(nudgcoef_west[jj,ji], \ + nudgcoef_east[jj,ji],nudgcoef_north[jj,ji],nudgcoef_south[jj,ji]) + return nudgcoef + + def _write_nc_file(self): + ''' writing to netcdf and multiplying by inverse timescales ''' + ncfile = self.foutname + fid = nc.Dataset(ncfile, 'w', format='NETCDF3_CLASSIC') + + # dimensions + fid.createDimension('xi_rho', npy.size(self.grd.hgrid.mask_rho,1)) + fid.createDimension('eta_rho', npy.size(self.grd.hgrid.mask_rho,0)) + fid.createDimension('s_rho', self.grd.vgrid.N) + 
fid.createDimension('s_w', self.grd.vgrid.Np) + fid.description = 'Nudging coefficients for grid' + self.grd.name + # vertical coordinate + fid.createVariable('s_rho', 'f8', ('s_rho')) + fid.variables['s_rho'].long_name = 'S-coordinate at RHO-points' + fid.variables['s_rho'].valid_min = '-1' + fid.variables['s_rho'].valid_max = '0' + fid.variables['s_rho'].field = 's_rho,scalar' + fid.variables['s_rho'][:] = self.grd.vgrid.s_rho + + # variables + O_M2_NudgeCoef = fid.createVariable('M2_NudgeCoef', 'f8', ('eta_rho','xi_rho',)) + O_M3_NudgeCoef = fid.createVariable('M3_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) + O_temp_NudgeCoef = fid.createVariable('temp_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) + O_salt_NudgeCoef = fid.createVariable('salt_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) + O_tracer_NudgeCoef = fid.createVariable('tracer_NudgeCoef', 'f8', ('s_rho','eta_rho','xi_rho',)) + # data + O_M2_NudgeCoef[:,:] = (1./self.tra_ts['M2']) * self.nud2 + O_M3_NudgeCoef[:,:,:] = (1./self.tra_ts['M3']) * self.nud3 + O_temp_NudgeCoef[:,:,:] = (1./self.tra_ts['temp']) * self.nud3 + O_salt_NudgeCoef[:,:,:] = (1./self.tra_ts['salt']) * self.nud3 + O_tracer_NudgeCoef[:,:,:] = (1./self.tra_ts['tracer']) * self.nud3 + # attributes + O_M2_NudgeCoef.long_name = '2D momentum inverse nudging coefficients' + O_M2_NudgeCoef.units = 'days-1' + O_M2_NudgeCoef.coordinates = 'xi_rho eta_rho' + + O_M3_NudgeCoef.long_name = '3D momentum inverse nudging coefficients' + O_M3_NudgeCoef.units = 'days-1' + O_M3_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' + + O_temp_NudgeCoef.long_name = 'temp inverse nudging coefficients' + O_temp_NudgeCoef.units = 'days-1' + O_temp_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' + + O_salt_NudgeCoef.long_name = 'salt inverse nudging coefficients' + O_salt_NudgeCoef.units = 'days-1' + O_salt_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' + + O_tracer_NudgeCoef.long_name = 'generic tracer inverse nudging coefficients' + O_tracer_NudgeCoef.units = 'days-1' + O_tracer_NudgeCoef.coordinates = 'xi_rho eta_rho s_rho' + # close + fid.close() + return None #---------------------------------------------------------------------------- diff --git a/examples/make_tide/CGrid_TPXO8/__init__.py b/examples/make_tide/CGrid_TPXO8/__init__.py index 6501d0e..2de9184 100644 --- a/examples/make_tide/CGrid_TPXO8/__init__.py +++ b/examples/make_tide/CGrid_TPXO8/__init__.py @@ -2,9 +2,9 @@ TPXO8 module """ -from CGrid_TPXO8 import CGrid_TPXO8 -from get_nc_CGrid_TPXO8 import get_nc_CGrid_TPXO8 -from make_remap_grid_file import make_remap_grid_file -from flood import flood -from remap import remap -from tidal_ellipse import * +from .CGrid_TPXO8 import CGrid_TPXO8 +from .get_nc_CGrid_TPXO8 import get_nc_CGrid_TPXO8 +from .make_remap_grid_file import make_remap_grid_file +from .flood import flood +from .remap import remap +from .tidal_ellipse import * diff --git a/examples/make_tide/CGrid_TPXO8/get_nc_CGrid_TPXO8.py b/examples/make_tide/CGrid_TPXO8/get_nc_CGrid_TPXO8.py index 9300b3d..53dc4cb 100644 --- a/examples/make_tide/CGrid_TPXO8/get_nc_CGrid_TPXO8.py +++ b/examples/make_tide/CGrid_TPXO8/get_nc_CGrid_TPXO8.py @@ -1,6 +1,6 @@ import numpy as np import pyroms -from CGrid_TPXO8 import CGrid_TPXO8 +from .CGrid_TPXO8 import CGrid_TPXO8 def get_nc_CGrid_TPXO8(grdfile, name='TPXO8', \ diff --git a/examples/make_tide/CGrid_TPXO8/remap.py b/examples/make_tide/CGrid_TPXO8/remap.py index abc1c37..c8ccbf4 100644 --- a/examples/make_tide/CGrid_TPXO8/remap.py +++ 
b/examples/make_tide/CGrid_TPXO8/remap.py @@ -23,7 +23,7 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k nctime.units = 'days since 1900-01-01 00:00:00' # create tide file - print '\nCreating tide file', dst_file + print('\nCreating tide file', dst_file) if os.path.exists(dst_file) is True: os.remove(dst_file) pyroms_toolbox.nc_create_roms_file(dst_file, dst_grd, nctime) @@ -96,10 +96,10 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k field = 'y-velocity, scalar, series' else: - raise ValueError, 'Undefined src_varname' + raise ValueError('Undefined src_varname') # create variable in file - print 'Creating variable', dst_varname + print('Creating variable', dst_varname) nc.createVariable(dst_varname, 'f8', dimensions, fill_value=spval) # nc.createVariable(dst_varname, 'f8', dimensions) nc.variables[dst_varname].long_name = long_name @@ -107,15 +107,15 @@ def remap(src_varname, src_file, src_grd, dst_grd, dst_file, dmax=0, cdepth=0, k nc.variables[dst_varname].field = field # remapping - print 'remapping', dst_varname, 'from', src_grd.name, \ - 'to', dst_grd.name + print('remapping', dst_varname, 'from', src_grd.name, \ + 'to', dst_grd.name) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_var = pyroms.remapping.remap(src_var, wts_file, spval=spval) # write data in destination file - print 'write data in destination file\n' + print('write data in destination file\n') nc.variables[dst_varname][:] = dst_var # close file diff --git a/examples/make_tide/CGrid_TPXO8/tidal_ellipse.py b/examples/make_tide/CGrid_TPXO8/tidal_ellipse.py index 86ab81f..b99092a 100644 --- a/examples/make_tide/CGrid_TPXO8/tidal_ellipse.py +++ b/examples/make_tide/CGrid_TPXO8/tidal_ellipse.py @@ -603,11 +603,11 @@ def prep_plot(SEMA, ECC, INC, PHA): rAu, rPhi_u, rAv, rPhi_v, rw = ep2ap(SEMA, ECC, INC, PHA, [2, 3, 1]) # Check if ep2ap has recovered Au, Phi_u, Av, Phi_v - print(np.max(np.abs(rAu - Au).flatten())) # = 9.9920e-16, = 2.22044604925e-16 - print(np.max(np.abs(rAv - Av).flatten())) # = 6.6613e-16, = 7.77156117238e-16 - print(np.max(np.abs(rPhi_u - Phi_u).flatten())) # = 4.4764e-13, = 1.70530256582e-13 - print(np.max(np.abs(rPhi_v - Phi_v).flatten())) # = 1.1369e-13, = 2.27373675443e-13 - print(np.max(np.max(np.abs(w - rw).flatten()))) # = 1.3710e-15, = 1.1322097734e-15 + print((np.max(np.abs(rAu - Au).flatten()))) # = 9.9920e-16, = 2.22044604925e-16 + print((np.max(np.abs(rAv - Av).flatten()))) # = 6.6613e-16, = 7.77156117238e-16 + print((np.max(np.abs(rPhi_u - Phi_u).flatten()))) # = 4.4764e-13, = 1.70530256582e-13 + print((np.max(np.abs(rPhi_v - Phi_v).flatten()))) # = 1.1369e-13, = 2.27373675443e-13 + print((np.max(np.max(np.abs(w - rw).flatten())))) # = 1.3710e-15, = 1.1322097734e-15 # For the random realization I (Zhigang Xu) had, the differences are listed # on the right hand of the above column. I (Pierre Cazenave) got the second # column with the Python version. What are yours? 
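Stepping back to the nudging-coefficient machinery: the docstring of nudgcoef.__call__ in lib_nudgcoef.py above spells out the boundary dictionaries and tracer timescales it expects. A minimal usage sketch built from that docstring follows; the grid ID 'NWGOA3' and the assumption that lib_nudgcoef.py is importable from the working directory are illustrative only.

    from lib_nudgcoef import nudgcoef

    # One 50-point linear nudging band on every open boundary,
    # 30-day timescales for momentum and all tracers.
    open_bdry = {'nudge': True, 'factor': 1, 'width': 50, 'transition': 'linear'}
    tracer_timescales = {'M2': 30, 'M3': 30, 'temp': 30, 'salt': 30, 'tracer': 30}

    nud = nudgcoef('NWGOA3')   # grid ID resolved via pyroms.grid.get_ROMS_grid
    nud(open_bdry, open_bdry, open_bdry, open_bdry, tracer_timescales,
        foutname='./nudging_coef.nc')

Widths are in grid points and timescales in days; the class writes the inverse coefficients (days-1) for M2, M3, temp, salt and the generic tracers to the output file.
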
diff --git a/examples/make_tide/make_remap_weights_file.py b/examples/make_tide/make_remap_weights_file.py old mode 100755 new mode 100644 diff --git a/examples/rivers/add_remap.py b/examples/rivers/add_remap.py index 4d7dea1..aa05c44 100644 --- a/examples/rivers/add_remap.py +++ b/examples/rivers/add_remap.py @@ -12,32 +12,32 @@ def add_to_lists(pairs, i, j, sign, dir): for it in range(1,len(pairs)): x2, y2 = pairs[it] - if x2 > x1: - # negative v-velocity - i.append(x1) - j.append(y1) - sign.append(-1) - dir.append(1) - elif x1 > x2: - # positive v-velocity - i.append(x2) - j.append(y1) - sign.append(1) - dir.append(1) - elif y2 > y1: - # positive u-velocity - i.append(x1) - j.append(y1) - sign.append(1) - dir.append(0) - elif y1 > y2: - # negative u-velocity - i.append(x1) - j.append(y2) - sign.append(-1) - dir.append(0) - x1 = x2 - y1 = y2 + if x2 > x1: + # negative v-velocity + i.append(x1) + j.append(y1) + sign.append(-1) + dir.append(1) + elif x1 > x2: + # positive v-velocity + i.append(x2) + j.append(y1) + sign.append(1) + dir.append(1) + elif y2 > y1: + # positive u-velocity + i.append(x1) + j.append(y1) + sign.append(1) + dir.append(0) + elif y1 > y2: + # negative u-velocity + i.append(x1) + j.append(y2) + sign.append(-1) + dir.append(0) + x1 = x2 + y1 = y2 #outfile = sys.argv[1] outfile = 'remap_grid_CI_rivers.nc' @@ -65,15 +65,15 @@ def add_to_lists(pairs, i, j, sign, dir): for line in f: a, b, c = re.split('\s+', line) if a=='-10': - # wrap up object - add_to_lists(pairs, i, j, sign, dir) + # wrap up object + add_to_lists(pairs, i, j, sign, dir) elif (a=='-1' or a=='-3'): - # wrap up object - add_to_lists(pairs, i, j, sign, dir) - # start new object + # wrap up object + add_to_lists(pairs, i, j, sign, dir) + # start new object pairs = [] else: - pairs.append([int(a),int(b)]) + pairs.append([int(a),int(b)]) # set up grid coords grd = pyroms.grid.get_ROMS_grid('COOK_INLET_LYON') diff --git a/examples/rivers/add_rivers.py b/examples/rivers/add_rivers.py index d206b2f..fa9f6f5 100644 --- a/examples/rivers/add_rivers.py +++ b/examples/rivers/add_rivers.py @@ -10,32 +10,32 @@ def add_to_lists(pairs, i, j, sign, dir): for it in range(1,len(pairs)): x2, y2 = pairs[it] - if x2 > x1: - # negative v-velocity - i.append(x1) - j.append(y1) - sign.append(-1) - dir.append(1) - elif x1 > x2: - # positive v-velocity - i.append(x2) - j.append(y1) - sign.append(1) - dir.append(1) - elif y2 > y1: - # positive u-velocity - i.append(x1) - j.append(y1) - sign.append(1) - dir.append(0) - elif y1 > y2: - # negative u-velocity - i.append(x1) - j.append(y2) - sign.append(-1) - dir.append(0) - x1 = x2 - y1 = y2 + if x2 > x1: + # negative v-velocity + i.append(x1) + j.append(y1) + sign.append(-1) + dir.append(1) + elif x1 > x2: + # positive v-velocity + i.append(x2) + j.append(y1) + sign.append(1) + dir.append(1) + elif y2 > y1: + # positive u-velocity + i.append(x1) + j.append(y1) + sign.append(1) + dir.append(0) + elif y1 > y2: + # negative u-velocity + i.append(x1) + j.append(y2) + sign.append(-1) + dir.append(0) + x1 = x2 + y1 = y2 outfile = sys.argv[1] @@ -62,15 +62,15 @@ def add_to_lists(pairs, i, j, sign, dir): for line in f: a, b, c = re.split('\s+', line) if a=='-10': - # wrap up object - add_to_lists(pairs, i, j, sign, dir) + # wrap up object + add_to_lists(pairs, i, j, sign, dir) elif (a=='-1' or a=='-3'): - # wrap up object - add_to_lists(pairs, i, j, sign, dir) - # start new object + # wrap up object + add_to_lists(pairs, i, j, sign, dir) + # start new object pairs = [] else: - 
pairs.append([int(a),int(b)]) + pairs.append([int(a),int(b)]) # create file with all the objects out = netCDF4.Dataset(outfile, 'w', format='NETCDF3_64BIT') @@ -90,7 +90,7 @@ def add_to_lists(pairs, i, j, sign, dir): river = out.createVariable('river', 'i4', ('river')) river.long_name = 'river runoff identification number' -out.variables['river'][:] = range(1,len(i)+1) +out.variables['river'][:] = list(range(1,len(i)+1)) flag = out.createVariable('river_sign', 'f8', ('river')) flag.long_name = 'river directional sign' diff --git a/examples/rivers/compute_daitren_remap_weights.py b/examples/rivers/compute_daitren_remap_weights.py index 629abbc..9fee80a 100644 --- a/examples/rivers/compute_daitren_remap_weights.py +++ b/examples/rivers/compute_daitren_remap_weights.py @@ -8,7 +8,7 @@ ## load 2-dimentional interannual discharge data ## from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.iaf.10FEB2011.nc', 'r') runoff = nc_data.variables['runoff'][:] lon = nc_data.variables['xc'][:] @@ -18,7 +18,7 @@ mask = nc_data.variables['mask'][:] ## create data remap file for scrip -print 'Create remap grid file for Dai and Trenberth runoff' +print('Create remap grid file for Dai and Trenberth runoff') remap_filename = 'remap_grid_daitren.nc' nc = netCDF.Dataset(remap_filename, 'w', format='NETCDF3_CLASSIC') nc.Description = 'remap grid file for Dai and Trenberth runoff data' @@ -84,14 +84,14 @@ ## create NWGOA remap file for scrip -print 'Create remap grid file for NWGOA grid' +print('Create remap grid file for NWGOA grid') dstgrd = pyroms.grid.get_ROMS_grid('NWGOA') dstgrd.hgrid.mask_rho = np.ones(dstgrd.hgrid.mask_rho.shape) pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') ## compute remap weights -print 'compute remap weights using scrip' +print('compute remap weights using scrip') # input namelist variables for conservative remapping at rho points grid1_file = 'remap_grid_daitren.nc' grid2_file = 'remap_grid_NWGOA_rho.nc' diff --git a/examples/rivers/compute_hill_remap_weights.py b/examples/rivers/compute_hill_remap_weights.py index cc1ba66..ef3ddfa 100644 --- a/examples/rivers/compute_hill_remap_weights.py +++ b/examples/rivers/compute_hill_remap_weights.py @@ -99,14 +99,14 @@ # create CI remap file for scrip -print 'Create remap grid file for CI grid' +print('Create remap grid file for CI grid') dstgrd = pyroms.grid.get_ROMS_grid('COOK_INLET_LYON') dstgrd.hgrid.mask_rho = np.ones(dstgrd.hgrid.mask_rho.shape) pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') ## compute remap weights -print 'compute remap weights using scrip' +print('compute remap weights using scrip') # input namelist variables for conservative remapping at rho points grid1_file = '../version1/remap_grid_runoff.nc' grid2_file = 'remap_grid_COOK_INLET_LYON_rho.nc' diff --git a/examples/rivers/compute_hill_remap_weights_2.py b/examples/rivers/compute_hill_remap_weights_2.py index d3a2c47..965a350 100644 --- a/examples/rivers/compute_hill_remap_weights_2.py +++ b/examples/rivers/compute_hill_remap_weights_2.py @@ -15,7 +15,7 @@ # # ### compute remap weights -print 'compute remap weights using scrip' +print('compute remap weights using scrip') # input namelist variables for conservative remapping at rho points grid1_file = '../version1/remap_grid_runoff.nc' grid2_file = 'remap_grid_CI_rho.nc' diff --git a/examples/rivers/hack_runoff_clim.py 
b/examples/rivers/hack_runoff_clim.py index 6435c83..b770c2a 100644 --- a/examples/rivers/hack_runoff_clim.py +++ b/examples/rivers/hack_runoff_clim.py @@ -8,7 +8,7 @@ # load 2-dimentional interannual discharge data # from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) -print 'Load interannual discharge data' +print('Load interannual discharge data') runoff_file = 'NWGOA_runoff.nc' nc = netCDF.Dataset(runoff_file, 'a', format='NETCDF3_64BIT') @@ -18,16 +18,16 @@ raw_180 = runoff_raw[180,:,:] runoff_180 = runoff[180,:,:] -print 'Sum 1', np.sum(raw_180) -print 'Sum 2', np.sum(runoff_180) +print('Sum 1', np.sum(raw_180)) +print('Sum 2', np.sum(runoff_180)) susitna = runoff[:,349,590] copper = runoff[:,158,688] copper2 = runoff[:,157,688] -print 'copper', copper[180] -print 'copper2', copper2[180] -print 'susitna', susitna[180] +print('copper', copper[180]) +print('copper2', copper2[180]) +print('susitna', susitna[180]) runoff[:,350,591] += susitna/3.0 runoff[:,348,590] += susitna/3.0 @@ -38,11 +38,11 @@ runoff[:,158,688] = copper/3.0 runoff_180 = runoff[180,:,:] -print 'Sum 3', np.sum(runoff_180) +print('Sum 3', np.sum(runoff_180)) -print 'copper', copper[180] -print 'copper2', copper2[180] -print 'susitna', susitna[180] +print('copper', copper[180]) +print('copper2', copper2[180]) +print('susitna', susitna[180]) nc.variables['Runoff'][:] = runoff diff --git a/examples/rivers/latlon_to_nc.py b/examples/rivers/latlon_to_nc.py index 9445c75..601c441 100644 --- a/examples/rivers/latlon_to_nc.py +++ b/examples/rivers/latlon_to_nc.py @@ -17,7 +17,7 @@ lon2 = np.zeros(lat.shape) numy, numx = lat.shape -print numy, numx +print(numy, numx) for j in range(numy): lat2[j,:] = lat[numy-1-j,:] diff --git a/examples/rivers/make_river_clim.py b/examples/rivers/make_river_clim.py index 1487e09..aa10175 100644 --- a/examples/rivers/make_river_clim.py +++ b/examples/rivers/make_river_clim.py @@ -6,8 +6,8 @@ import pyroms_toolbox -# load 2-dimentional discharge data -print 'Load discharge data' +# load 2-dimentional discharge data +print('Load discharge data') nc_data = netCDF.Dataset('CI_runoff.nc', 'r') nc_rivers = netCDF.Dataset('Cook_Inlet_rivers.nc', 'a') data = nc_data.variables['Runoff'][:] @@ -36,25 +36,25 @@ for k in range(Nr): if (sign[k]==1): count[eta[k],xi[k]] += 1 - rivers[eta[k],xi[k]].append(k) + rivers[eta[k],xi[k]].append(k) elif (sign[k]==-1 and dir[k]==0): count[eta[k],xi[k]-1] += 1 - rivers[eta[k],xi[k]-1].append(k) + rivers[eta[k],xi[k]-1].append(k) elif (sign[k]==-1 and dir[k]==1): count[eta[k]-1,xi[k]] += 1 - rivers[eta[k]-1,xi[k]].append(k) + rivers[eta[k]-1,xi[k]].append(k) nct=0 for t in range(nt): - print 'Remapping runoff for time %f' %time[t] + print('Remapping runoff for time %f' %time[t]) for j in range(Mp): for i in range(Lp): - for n in range(count[j,i]): - frac = 1.0/count[j,i] - k = rivers[j,i][n] - runoff[k] = frac*data[t,j,i] - + for n in range(count[j,i]): + frac = 1.0/count[j,i] + k = rivers[j,i][n] + runoff[k] = frac*data[t,j,i] + if t==180: sum180 = np.sum(runoff) @@ -69,4 +69,4 @@ # close netcdf file nc_rivers.close() -print 'sum 4', sum180 +print('sum 4', sum180) diff --git a/examples/rivers/make_runoff_clim.py b/examples/rivers/make_runoff_clim.py index 2d17556..96c2e07 100644 --- a/examples/rivers/make_runoff_clim.py +++ b/examples/rivers/make_runoff_clim.py @@ -8,7 +8,7 @@ # load 2-dimentional interannual discharge data # from Hill and Beamer. 
-print 'Load interannual discharge data' +print('Load interannual discharge data') nc_data = netCDF.Dataset('runoff.nc', 'r') time = nc_data.variables['time'][:] data = nc_data.variables['runoff'][:] @@ -95,7 +95,7 @@ #for t in range(nt): for t in range(nt-243,nt): flow = np.sum(data[t,280:600,160:460]) - print nct+1, 'Remapping runoff for time %f' %time[t] + print(nct+1, 'Remapping runoff for time %f' %time[t]) # print 'Remapping runoff for time %f' %time[nct] # conservative horizontal interpolation using scrip runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ @@ -115,17 +115,17 @@ # HACK nc.variables['runoff_time'][nct] = time[nct] if t==180: - print 'Sum 2', np.sum(runoff_raw) - print 'Sum 3', np.sum(runoff) + print('Sum 2', np.sum(runoff_raw)) + print('Sum 3', np.sum(runoff)) if nct==180: - print 'Sum 2 new 180', np.sum(runoff_raw) - print 'Sum 3 new 180', np.sum(runoff) + print('Sum 2 new 180', np.sum(runoff_raw)) + print('Sum 3 new 180', np.sum(runoff)) nct = nct + 1 # Get rest of year for t in range(nt-243): flow = np.sum(data[t,280:600,160:460]) - print nct+1, 'Remapping runoff for time %f' %time[t] + print(nct+1, 'Remapping runoff for time %f' %time[t]) # conservative horizontal interpolation using scrip runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ @@ -145,8 +145,8 @@ nct = nct + 1 if t==180: - print 'Sum 2', np.sum(runoff_raw) - print 'Sum 3', np.sum(runoff) + print('Sum 2', np.sum(runoff_raw)) + print('Sum 3', np.sum(runoff)) # close netcdf file nc.close() diff --git a/examples/rivers/mask_flow.py b/examples/rivers/mask_flow.py index 9ab16aa..d6b46fb 100644 --- a/examples/rivers/mask_flow.py +++ b/examples/rivers/mask_flow.py @@ -36,7 +36,7 @@ if it == 180: raw_180 = runoff[280:600,160:460] -print 'Sum 1', np.sum(raw_180) +print('Sum 1', np.sum(raw_180)) nc.close() mc.close() diff --git a/examples/rivers/maskedge.py b/examples/rivers/maskedge.py index 9401f24..14aa726 100644 --- a/examples/rivers/maskedge.py +++ b/examples/rivers/maskedge.py @@ -1,4 +1,4 @@ -from __future__ import print_function + import numpy as np import netCDF4 import sys @@ -50,13 +50,13 @@ def flood_fill_water(imask, i, j, ii): (j,i) = llist.pop() imask[j,i] = ii if ( imask[j,i-1] == jj and i > 1 ): - llist.append((j, i-1)) + llist.append((j, i-1)) if ( imask[j-1,i] == jj and j > 1 ): - llist.append((j-1, i)) + llist.append((j-1, i)) if ( imask[j,i+1] == jj and i < Lm ): - llist.append((j, i+1)) + llist.append((j, i+1)) if ( imask[j+1,i] == jj and j < Mm ): - llist.append((j+1, i)) + llist.append((j+1, i)) def flood_fill_land(imask, i, j, ii): """ @@ -75,29 +75,29 @@ def flood_fill_land(imask, i, j, ii): (j,i) = llist.pop() imask[j,i] = ii if ( imask[j,i-1] == jj and i > 1 ): - llist.append((j, i-1)) + llist.append((j, i-1)) if ( imask[j-1,i] == jj and j > 1 ): - llist.append((j-1, i)) + llist.append((j-1, i)) if ( imask[j,i+1] == jj and i < Lm ): - llist.append((j, i+1)) + llist.append((j, i+1)) if ( imask[j+1,i] == jj and j < Mm ): - llist.append((j+1, i)) + llist.append((j+1, i)) # now do the diagonals if ( imask[j-1,i-1] == jj and i > 1 ): - llist.append((j-1, i-1)) + llist.append((j-1, i-1)) if ( imask[j-1,i+1] == jj and j > 1 ): - llist.append((j-1, i+1)) + llist.append((j-1, i+1)) if ( imask[j+1,i+1] == jj and i < Lm ): - llist.append((j+1, i+1)) + llist.append((j+1, i+1)) if ( imask[j+1,i-1] == jj and j < Mm ): - llist.append((j+1, i-1)) + llist.append((j+1, i-1)) def set_values(imask, k, val): """ Set all k values to val""" Mp, Lp = imask.shape for j in range(1,Mp-1): for 
i in range(1,Lp-1): - if (imask[j,i] == k): imask[j,i] = val + if (imask[j,i] == k): imask[j,i] = val def warning(*objs): print("STDERR: ", *objs, file=sys.stderr) @@ -110,8 +110,8 @@ def color_water(imask): Mp, Lp = imask.shape for j in range(1,Mp-1): for i in range(1,Lp-1): - if (imask[j,i] == 1): - flood_fill_water(imask, i, j, count) + if (imask[j,i] == 1): + flood_fill_water(imask, i, j, count) warning("New body!", i, j) count += 1 @@ -132,13 +132,13 @@ def peninsula(imask, plist, i, j, dir, iwat, iland): plist.append(p) if (dir == 'east'): - seed = (i,j) + seed = (i,j) elif (dir == 'west'): - seed = (i-1,j-1) + seed = (i-1,j-1) elif (dir == 'south'): - seed = (i,j-1) + seed = (i,j-1) elif (dir == 'north'): - seed = (i-1,j) + seed = (i-1,j) # warning("Peninsula at", seed, dir) # Trace the edge of the peninsula, keeping track of the psi @@ -147,61 +147,61 @@ def peninsula(imask, plist, i, j, dir, iwat, iland): # value. while True: if (dir == 'east'): - i += 1 + i += 1 p = (i,j) - if ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): - break - else: - warning("Problem in peninsula at ", i, j) - exit(1) + if ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): + break + else: + warning("Problem in peninsula at ", i, j) + exit(1) elif (dir == 'north'): - j += 1 + j += 1 p = (i,j) - if ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): - break - else: - warning("Problem in peninsula at ", i, j) - exit(1) + if ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): + break + else: + warning("Problem in peninsula at ", i, j) + exit(1) elif (dir == 'west'): - i -= 1 + i -= 1 p = (i,j) - if ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): - break - else: - warning("Problem in peninsula at ", i, j) - exit(1) + if ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): + break + else: + warning("Problem in peninsula at ", i, j) + exit(1) elif (dir == 'south'): - j -= 1 + j -= 1 p = (i,j) - if ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): - break - else: - warning("Problem in peninsula at ", i, j) - exit(1) + if 
((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + elif (i==1 or j==1 or i==Lp-1 or j==Mp-1): + break + else: + warning("Problem in peninsula at ", i, j) + exit(1) plist.append(p) plist.append(p) @@ -227,13 +227,13 @@ def island(imask, ilist, i, j, dir, iwat, iland): p = (0,0) if (dir == 'east'): - seed = (i,j) + seed = (i,j) elif (dir == 'west'): - seed = (i-1,j-1) + seed = (i-1,j-1) elif (dir == 'south'): - seed = (i,j-1) + seed = (i,j-1) elif (dir == 'north'): - seed = (i-1,j) + seed = (i-1,j) # warning("Island at", seed, dir) # Trace the edge of the island, keeping track of the psi @@ -241,56 +241,56 @@ def island(imask, ilist, i, j, dir, iwat, iland): # edge segments so that we can later change the peninsula mask # points to water. while True: - if (p == pstart): - break + if (p == pstart): + break if (dir == 'east'): - i += 1 + i += 1 p = (i,j) - if ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - else: - warning("Problem in island at ", i, j) - exit(1) + if ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + else: + warning("Problem in island at ", i, j) + exit(1) elif (dir == 'north'): - j += 1 + j += 1 p = (i,j) - if ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - else: - warning("Problem in island at ", i, j) - exit(1) + if ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + elif ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + else: + warning("Problem in island at ", i, j) + exit(1) elif (dir == 'west'): - i -= 1 + i -= 1 p = (i,j) - if ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): - dir = 'north' - elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - else: - warning("Problem in island at ", i, j) - exit(1) + if ((imask[j,i-1] == iland) and (imask[j,i] == iwat)): + dir = 'north' + elif ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + else: + warning("Problem in island at ", i, j) + exit(1) elif (dir == 'south'): - j -= 1 + j -= 1 p = (i,j) - if ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): - dir = 'west' - elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): - dir = 'south' - elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): - dir = 'east' - else: - warning("Problem in island at ", i, j) - exit(1) + if ((imask[j-1,i-1] == iland) and (imask[j,i-1] == iwat)): + dir = 'west' + elif ((imask[j-1,i] == iland) and (imask[j-1,i-1] == iwat)): + dir = 'south' + elif ((imask[j,i] == iland) and (imask[j-1,i] == iwat)): + dir = 'east' + else: + warning("Problem in island at ", i, j) + exit(1) ilist.append(p) @@ -310,10 +310,10 @@ def edges(imask, plist, iwat, 
iland): for i in range(1,Lp-2): if ((imask[Mp-2,i] == iwat) and (imask[Mp-2,i+1] == iland)): peninsula(imask, plist, i+1, Mp-1, 'south', iwat, iland) - for j in list(reversed(range(2,Mp-1))): + for j in list(reversed(list(range(2,Mp-1)))): if ((imask[j,Lp-2] == iwat) and (imask[j-1,Lp-2] == iland)): peninsula(imask, plist, Lp-1, j, 'west', iwat, iland) - for i in list(reversed(range(2,Lp-1))): + for i in list(reversed(list(range(2,Lp-1)))): if ((imask[1,i] == iwat) and (imask[1,i-1] == iland)): peninsula(imask, plist, i, 1, 'north', iwat, iland) @@ -325,7 +325,7 @@ def interior(imask, ilist, iwat, iland): for i in range(2,Lp-2): for j in range(2,Mp-2): if ((imask[j,i] == iwat) and (imask[j+1,i] == iland)): - island(imask, ilist, i, j+1, 'east', iwat, iland) + island(imask, ilist, i, j+1, 'east', iwat, iland) def main(): ncfile = sys.argv[1] @@ -347,16 +347,16 @@ def main(): # Ngl.contour(wks,imask,res) interior(imask, ipoints, iwat, iland) # Ngl.contour(wks,imask,res) - set_values(imask, iland, iwat) + set_values(imask, iland, iwat) # Ngl.contour(wks,imask,res) - iland = iwat + iland = iwat # Islands first, then peninsulas for point in ipoints: - i,j = point + i,j = point print(i, j) for point in lpoints: - i,j = point + i,j = point print(i, j) print(-10, -10) # Ngl.end() diff --git a/examples/rivers/mat_to_nc.py b/examples/rivers/mat_to_nc.py index 260d21e..496cebd 100644 --- a/examples/rivers/mat_to_nc.py +++ b/examples/rivers/mat_to_nc.py @@ -14,7 +14,7 @@ coast_cells = np.where(np.isnan(coast_cells),nine,coast_cells) numy, numx = coast_cells.shape -print numy, numx +print(numy, numx) ntime = 1 out.createDimension('x', numx) diff --git a/examples/rivers/set_vshape.py b/examples/rivers/set_vshape.py index 41d3937..e7f942c 100644 --- a/examples/rivers/set_vshape.py +++ b/examples/rivers/set_vshape.py @@ -18,7 +18,7 @@ area = sum(vshape[:,0]) vshape = (1.0/area)*vshape -print vshape[:,0] +print(vshape[:,0]) vshape2 = np.zeros([N]) for k in range(N): @@ -26,7 +26,7 @@ area = sum(vshape2[:]) vshape2 = (1.0/area)*vshape2 -print vshape2 +print(vshape2) # Copper River #for k in range(5490,5497): diff --git a/examples/rivers/squeeze_rivers.py b/examples/rivers/squeeze_rivers.py index 8735c18..9c465b2 100644 --- a/examples/rivers/squeeze_rivers.py +++ b/examples/rivers/squeeze_rivers.py @@ -20,7 +20,7 @@ time = nc_rivers.variables['river_time'][:] run_280 = np.abs(runoff[280,:]) -print 'Sum 5', np.sum(run_280) +print('Sum 5', np.sum(run_280)) Nr = sign.shape[0] Nt = time.shape[0] @@ -28,7 +28,7 @@ year_sum = np.sum(runoff, axis=0) all_sum = np.sum(np.abs(year_sum)) -print year_sum.shape, all_sum +print(year_sum.shape, all_sum) for i in range(Nr): if np.abs(year_sum[i]) > 0.: @@ -44,7 +44,7 @@ salt2 = np.zeros((Nt)) temp2 = temp -print 'Squeezing down to', count, 'rivers' +print('Squeezing down to', count, 'rivers') it = 0 for i in range(Nr): if np.abs(year_sum[i]) > 0.: @@ -120,10 +120,10 @@ out.variables['river_time'][:] = time run_280 = np.abs(runoff2[280,:]) -print 'Sum 6', np.sum(run_280) +print('Sum 6', np.sum(run_280)) year_sum = np.sum(runoff2, axis=0) all_sum = np.sum(np.abs(year_sum)) -print year_sum.shape, all_sum +print(year_sum.shape, all_sum) out.close() diff --git a/examples/rivers/view_temp.py b/examples/rivers/view_temp.py index abd3283..8980a16 100644 --- a/examples/rivers/view_temp.py +++ b/examples/rivers/view_temp.py @@ -25,7 +25,7 @@ temp.append(float(b)) salt.append(0.0) -print temp +print(temp) # create file with all the objects out = netCDF4.Dataset(outfile, 'a', 
format='NETCDF3_64BIT') diff --git a/examples/runoff/compute_daitren_remap_weights.py b/examples/runoff/compute_daitren_remap_weights.py new file mode 100644 index 0000000..3750f4f --- /dev/null +++ b/examples/runoff/compute_daitren_remap_weights.py @@ -0,0 +1,107 @@ +import numpy as np +from datetime import datetime +import netCDF4 as netCDF + +import pyroms +import pyroms_toolbox + + +## load 2-dimentional interannual discharge data +## from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) +print('Load interannual discharge data') +nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.iaf.10FEB2011.nc', 'r') +runoff = nc_data.variables['runoff'][:] +lon = nc_data.variables['xc'][:] +lat = nc_data.variables['yc'][:] +lon_corner = nc_data.variables['xv'][:] +lat_corner = nc_data.variables['yv'][:] +mask = nc_data.variables['mask'][:] + +## create data remap file for scrip +print('Create remap grid file for Dai and Trenberth runoff') +remap_filename = 'remap_grid_daitren.nc' +nc = netCDF.Dataset(remap_filename, 'w', format='NETCDF3_CLASSIC') +nc.Description = 'remap grid file for Dai and Trenberth runoff data' +nc.Author = 'build_runoff' +nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") +nc.title = 'Dai and Trenberth runoff' + +grid_center_lon = lon.flatten() +grid_center_lat = lat.flatten() +Mp, Lp = lon.shape +grid_imask = mask.flatten() +grid_size = Lp * Mp +grid_corner_lon = np.zeros((grid_size, 4)) +grid_corner_lat = np.zeros((grid_size, 4)) +k = 0 +for j in range(Mp): + for i in range(Lp): + grid_corner_lon[k,0] = lon_corner[0,j,i] + grid_corner_lat[k,0] = lat_corner[0,j,i] + grid_corner_lon[k,1] = lon_corner[1,j,i] + grid_corner_lat[k,1] = lat_corner[1,j,i] + grid_corner_lon[k,2] = lon_corner[2,j,i] + grid_corner_lat[k,2] = lat_corner[2,j,i] + grid_corner_lon[k,3] = lon_corner[3,j,i] + grid_corner_lat[k,3] = lat_corner[3,j,i] + k = k + 1 + +nc.createDimension('grid_size', grid_size) +nc.createDimension('grid_corners', 4) +nc.createDimension('grid_rank', 2) + +nc.createVariable('grid_dims', 'i4', ('grid_rank')) +nc.variables['grid_dims'].long_name = 'grid size along x and y axis' +nc.variables['grid_dims'].units = 'None' +nc.variables['grid_dims'][:] = [(Lp, Mp)] + +nc.createVariable('grid_center_lon', 'f8', ('grid_size')) +nc.variables['grid_center_lon'].long_name = 'longitude of cell center' +nc.variables['grid_center_lon'].units = 'degrees' +nc.variables['grid_center_lon'][:] = grid_center_lon + +nc.createVariable('grid_center_lat', 'f8', ('grid_size')) +nc.variables['grid_center_lat'].long_name = 'latitude of cell center' +nc.variables['grid_center_lat'].units = 'degrees' +nc.variables['grid_center_lat'][:] = grid_center_lat + +nc.createVariable('grid_imask', 'i4', ('grid_size')) +nc.variables['grid_imask'].long_name = 'mask' +nc.variables['grid_imask'].units = 'None' +nc.variables['grid_imask'][:] = grid_imask + +nc.createVariable('grid_corner_lon', 'f8', ('grid_size', 'grid_corners')) +nc.variables['grid_corner_lon'].long_name = 'longitude of cell corner' +nc.variables['grid_corner_lon'].units = 'degrees' +nc.variables['grid_corner_lon'][:] = grid_corner_lon + +nc.createVariable('grid_corner_lat', 'f8', ('grid_size', 'grid_corners')) +nc.variables['grid_corner_lat'].long_name = 'latitude of cell corner' +nc.variables['grid_corner_lat'].units = 'degrees' +nc.variables['grid_corner_lat'][:] = grid_corner_lat + +nc.close() + + +## create SCS remap file for scrip +print('Create remap grid file for SCS grid') +dstgrd = 
pyroms.grid.get_ROMS_grid('SCS') +dstgrd.hgrid.mask_rho = np.ones(dstgrd.hgrid.mask_rho.shape) +pyroms.remapping.make_remap_grid_file(dstgrd, Cpos='rho') + + +## compute remap weights +print('compute remap weights using scrip') +# input namelist variables for conservative remapping at rho points +grid1_file = 'remap_grid_daitren.nc' +grid2_file = 'remap_grid_SCS_rho.nc' +interp_file1 = 'remap_weights_daitren_to_SCS_conservative_nomask.nc' +interp_file2 = 'remap_weights_SCS_to_daitren_conservative_nomask.nc' +map1_name = 'daitren to SCS conservative Mapping' +map2_name = 'SCS to daitren conservative Mapping' +num_maps = 1 +map_method = 'conservative' + +pyroms.remapping.compute_remap_weights(grid1_file, grid2_file, \ + interp_file1, interp_file2, map1_name, \ + map2_name, num_maps, map_method) diff --git a/examples/runoff/make_SCS_runoff_clim.py b/examples/runoff/make_SCS_runoff_clim.py new file mode 100644 index 0000000..1aa9062 --- /dev/null +++ b/examples/runoff/make_SCS_runoff_clim.py @@ -0,0 +1,132 @@ +import numpy as np +import netCDF4 as netCDF +from datetime import datetime + +import pyroms +import pyroms_toolbox + + +# load 2-dimentional interannual discharge data +# from 1948-2007. See Dai and Trenberth (2002) and Dai et al. (2009) +print('Load interannual discharge data') +nc_data = netCDF.Dataset('/archive/u1/uaf/kate/CORE2/runoff.daitren.clim.10FEB2011.nc', 'r') +data = nc_data.variables['runoff'][:] + +# time: cyclic year (365.25 days) +time = np.array([15.21875, 45.65625, 76.09375, 106.53125, 136.96875, 167.40625, \ + 197.84375, 228.28125, 258.71875, 289.15625, 319.59375, 350.03125]) + + +# load SCS grid object +grd = pyroms.grid.get_ROMS_grid('SCS') + + +# define some variables +wts_file = 'remap_weights_daitren_to_SCS_conservative_nomask.nc' +nt = data.shape[0] +Mp, Lp = grd.hgrid.mask_rho.shape +spval = -1e30 +runoff_raw = np.zeros((Mp,Lp)) +runoff = np.zeros((Mp,Lp)) +rspread = 6 + +# create runoff file +#runoff_file = 'runoff_SCS_daitren_inter_annual_2002-2004.nc' +runoff_file = 'runoff_SCS_daitren_clim.nc' +nc = netCDF.Dataset(runoff_file, 'w', format='NETCDF3_64BIT') +nc.Description = 'Dai & Trenberth monthly climatology river discharge' +nc.Author = 'make_SCS_runoff.py' +nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") +nc.title = 'Dai & Trenberth river discharge' + +# creat dimensions and variables +nc.createDimension('xi_rho', np.size(grd.hgrid.mask_rho,1)) +nc.createDimension('eta_rho', np.size(grd.hgrid.mask_rho,0)) +nc.createDimension('runoff_time', (12)) + +nc.createVariable('lon_rho', 'f8', ('eta_rho', 'xi_rho')) +nc.variables['lon_rho'].long_name = 'longitude of RHO-points' +nc.variables['lon_rho'].units = 'degree_east' +nc.variables['lon_rho'].field = 'lon_rho, scalar' +nc.variables['lon_rho'][:] = grd.hgrid.lon_rho + +nc.createVariable('lat_rho', 'f8', ('eta_rho', 'xi_rho')) +nc.variables['lat_rho'].long_name = 'latitude of RHO-points' +nc.variables['lat_rho'].units = 'degree_north' +nc.variables['lat_rho'].field = 'lat_rho, scalar' +nc.variables['lat_rho'][:] = grd.hgrid.lat_rho + +nc.createVariable('runoff_time', 'f8', ('runoff_time')) +nc.variables['runoff_time'].long_name = 'time' +nc.variables['runoff_time'].units = 'days since 1900-01-01 00:00:00' +nc.variables['runoff_time'].cycle_length = 365.25 +nc.variables['runoff_time'][:] = time + +nc.createVariable('Runoff_raw', 'f8', ('runoff_time', 'eta_rho', 'xi_rho')) +nc.variables['Runoff_raw'].long_name = 'Dai_Trenberth River Runoff raw' +nc.variables['Runoff_raw'].missing_value = str(spval) 
+nc.variables['Runoff_raw'].units = 'kg/s/m^2' + +nc.createVariable('Runoff', 'f8', ('runoff_time', 'eta_rho', 'xi_rho')) +nc.variables['Runoff'].long_name = 'Dai_Trenberth River Runoff' +nc.variables['Runoff'].missing_value = str(spval) +nc.variables['Runoff'].units = 'kg/s/m^2' + + +# get littoral (here 4 cells width) +width = 4 +idx = [] +idy = [] +maskl = grd.hgrid.mask_rho.copy() +for w in range(width): + lit = pyroms_toolbox.get_littoral(maskl) + idx.extend(lit[0]) + idy.extend(lit[1]) + maskl[lit] = 0 + +littoral_idx = (np.array(idx), np.array(idy)) +maskl = np.zeros(grd.hgrid.mask_rho.shape) +maskl[littoral_idx] = 1 + +mask_idx = np.where(grd.hgrid.mask_rho == 0) + +nct=0 +for t in range(nt): + print('Remapping runoff for time %f' %time[nct]) + # conservative horizontal interpolation using scrip + runoff_raw = pyroms.remapping.remap(data[t,:,:], wts_file, \ + spval=spval) + idx = np.where(runoff_raw != 0) + runoff = pyroms_toolbox.move_runoff(runoff_raw, \ + np.array(idx).T + 1, np.array(littoral_idx).T + 1, maskl, \ + grd.hgrid.x_rho, grd.hgrid.y_rho, grd.hgrid.dx, grd.hgrid.dy) + + # spread the runoff within the littoral band + runoff_spread = np.zeros((Mp,Lp)) + idx = np.where(runoff != 0) + for p in range(np.size(idx,1)): + j = list(range(max(0,idx[0][p]-rspread), min(Mp-1,idx[0][p]+rspread+1))) + i = list(range(max(0,idx[1][p]-rspread), min(Lp-1,idx[1][p]+rspread+1))) + ji = np.meshgrid(j,i) + sidx = np.where(maskl[ji] == 1) + nbpt = np.size(sidx) / 2 + rpt = runoff[idx[0][p],idx[1][p]] * grd.hgrid.dx[idx[0][p],idx[1][p]] * grd.hgrid.dy[idx[0][p],idx[1][p]] + rpt = rpt / nbpt + for pa in range(nbpt): + pai = sidx[0][pa] + ji[1].min() + paj = sidx[1][pa] + ji[0].min() + runoff_spread[paj, pai] = runoff_spread[paj, pai] + \ + rpt / (grd.hgrid.dx[paj, pai] * grd.hgrid.dy[paj, pai]) + + + # spval + runoff_spread[mask_idx] = spval + + # write data in destination file + nc.variables['Runoff'][nct] = runoff_spread + nc.variables['Runoff_raw'][nct] = runoff_raw + + nct = nct + 1 + +# close netcdf file +nc.close() diff --git a/examples/stuff/aice.py b/examples/stuff/aice.py index 7bbba1f..50357f8 100644 --- a/examples/stuff/aice.py +++ b/examples/stuff/aice.py @@ -27,10 +27,10 @@ # "(/1.,.42,.42/)", "(/1.,.30,.30/)", \ # "(/1., 0., 0./)", "(/.85,0., 0./)"] cmap = ["white", "black", \ - "(/0., 0.,.85/)", "(/ 0., 0.,1./)", "(/.30,.30,1./)", \ - "(/.42,.42,1./)", "(/.55,.55,1./)", \ - "(/.64,.64,1./)", "(/.72,.72,1./)", "(/.80,.80,1./)", \ - "(/.88,.88,1./)", "(/.9, .9, .9/)", "burlywood"] + "(/0., 0.,.85/)", "(/ 0., 0.,1./)", "(/.30,.30,1./)", \ + "(/.42,.42,1./)", "(/.55,.55,1./)", \ + "(/.64,.64,1./)", "(/.72,.72,1./)", "(/.80,.80,1./)", \ + "(/.88,.88,1./)", "(/.9, .9, .9/)", "burlywood"] rlist = Ngl.Resources() rlist.wkColorMap = cmap @@ -116,13 +116,13 @@ txres.txFontHeightF = 0.015 for file in lst_file: - print "Plotting "+file + print("Plotting "+file) nc = netCDF4.Dataset(file, "r") aice = nc.variables["aice"][0,:,:] time = nc.variables["ocean_time"][0] myday = jday2date(time/86400.) 
date_tag = myday.strftime('%d %B %Y') - print date_tag + print(date_tag) plot = Ngl.contour_map(wks, aice, res) Ngl.text_ndc(wks, date_tag, 0.85, 0.94, txres) nc.close() diff --git a/examples/stuff/caldate.py b/examples/stuff/caldate.py index 470b41a..0843b1c 100644 --- a/examples/stuff/caldate.py +++ b/examples/stuff/caldate.py @@ -78,13 +78,13 @@ def caldate_1900(Julian): min = floor((sec%3600)/60) sec = round(sec%60) - print "Year: "+str(yr) - print "Year Day: "+str(yday) - print "Month: "+str(mo) - print "Day: "+str(d) - print "Hour: "+str(hour) - print "Min: "+str(min) - print "Sec: "+str(sec) + print("Year: "+str(yr)) + print("Year Day: "+str(yday)) + print("Month: "+str(mo)) + print("Day: "+str(d)) + print("Hour: "+str(hour)) + print("Min: "+str(min)) + print("Sec: "+str(sec)) cal = {'year':yr,'yearday':yday,'month':mo,'day':d,\ 'hour':hour,'minute':min,'second':sec} diff --git a/examples/stuff/make_grid.py b/examples/stuff/make_grid.py index 131330a..4483944 100644 --- a/examples/stuff/make_grid.py +++ b/examples/stuff/make_grid.py @@ -1,3 +1,6 @@ +# This file is designed to be cut and pasted into an ipython --pylab +# session. Otherwise, you'll need to "import numpy as np" then +# convert "array" to "np.array". import os from mpl_toolkits.basemap import Basemap, shiftgrid import matplotlib.colors as colors @@ -6,7 +9,7 @@ import pyroms import pyroms_toolbox -from ROMS_bathy_smoother import * +from bathy_smoother import * #Grid dimension @@ -55,7 +58,7 @@ #hgrd = bry.grd -lonv, latv = map(hgrd.x_vert, hgrd.y_vert, inverse=True) +lonv, latv = list(map(hgrd.x_vert, hgrd.y_vert, inverse=True)) hgrd = pyroms.grid.CGrid_geo(lonv, latv, map) # generate the mask @@ -77,7 +80,7 @@ # read in topo data (on a regular lat/lon grid) # this topo come with basemap so you should have it on your laptop. # just update datadir with the appropriate path -# you can get this data from matplolib svn with +# you can get this data from matplolib svn with # svn co https://matplotlib.svn.sourceforge.net/svnroot/matplotlib/trunk/htdocs/screenshots/data/" datadir = '/home/frederic/python/basemap-0.99.4/examples/' topo = np.loadtxt(os.path.join(datadir, 'etopo20data.gz')) @@ -108,7 +111,7 @@ # check bathymetry roughness RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) # smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006) rx0_max = 0.35 @@ -116,7 +119,7 @@ # check bathymetry roughness again RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) hgrd.h = h diff --git a/examples/stuff/make_scgrid.py b/examples/stuff/make_scgrid.py index d22fe3e..2dd696e 100644 --- a/examples/stuff/make_scgrid.py +++ b/examples/stuff/make_scgrid.py @@ -6,7 +6,7 @@ import pyroms import pyroms_toolbox -from ROMS_bathy_smoother import * +from bathy_smoother import * #Grid dimension @@ -19,7 +19,7 @@ lon1=0. ; lat1=0. lon2=10. ; lat2=0. lon3=40. ; lat3=4.724700232622634 -lon4=40. ; lat3=30. +lon4=40. ; lat4=30. 
#define map projection (here mercator) lon_min = min(lon0,lon1,lon2,lon3,lon4) @@ -49,7 +49,7 @@ #hgrd = bry.grd -lonv, latv = map(hgrd.x_vert, hgrd.y_vert, inverse=True) +lonv, latv = list(map(hgrd.x_vert, hgrd.y_vert, inverse=True)) hgrd = pyroms.grid.CGrid_geo(lonv, latv, map) # generate the mask @@ -63,7 +63,7 @@ # read in topo data (on a regular lat/lon grid) # this topo come with basemap so you should have it on your laptop. # just update datadir with the appropriate path -# you can get this data from matplolib svn with +# you can get this data from matplolib svn with # svn co https://matplotlib.svn.sourceforge.net/svnroot/matplotlib/trunk/htdocs/screenshots/data/" datadir = '/home/frederic/python/basemap-0.99.4/examples/' topo = np.loadtxt(os.path.join(datadir, 'etopo20data.gz')) @@ -94,7 +94,7 @@ # check bathymetry roughness RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) # smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006) rx0_max = 0.35 @@ -102,7 +102,7 @@ # check bathymetry roughness again RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho) -print 'Max Roughness value is: ', RoughMat.max() +print('Max Roughness value is: ', RoughMat.max()) hgrd.h = h diff --git a/examples/stuff/rename_file.py b/examples/stuff/rename_file.py index 1ac73a2..5405451 100644 --- a/examples/stuff/rename_file.py +++ b/examples/stuff/rename_file.py @@ -5,77 +5,78 @@ class tagfile(): - def __init__(self,filename): - self.filein = filename - return None + def __init__(self,filename): + self.filein = filename + return None - def __call__(self): - # read time in netcdf - time, timeunits = self.read_time() - # create the date tag - tag = self.create_tag(time, timeunits) - # define a new filename - self.create_new_filename(tag) - # rename file - self.rename_file() - return None + def __call__(self): + # read time in netcdf + time, timeunits = self.read_time() + # create the date tag + tag = self.create_tag(time, timeunits) + # define a new filename + self.create_new_filename(tag) + # rename file + self.rename_file() + return None - def read_time(self): - ''' read ocean_time variable and units in netcdf file''' - fid = nc.Dataset(self.filein,'r') - time = fid.variables['ocean_time'][:] - timeunits = fid.variables['ocean_time'].units - fid.close() - if len(time) > 1: + def read_time(self): + ''' read ocean_time variable and units in netcdf file''' + fid = nc.Dataset(self.filein,'r') + time = fid.variables['ocean_time'][:] + timeunits = fid.variables['ocean_time'].units + fid.close() + if len(time) > 1: # print 'error : multiple values in time array' ; exit() ntim = len(time) - time = time[ntim-1] - else: - time = time[0] - return time, timeunits + time = time[ntim-1] + else: + time = time[0] + return time, timeunits - def create_tag(self,time, timeunits): - ''' create a datetime object from reference date and ocean_time''' - # ugly part to get reference date from units string - units_wrk = timeunits.replace(':',' ').replace('-',' ').split() - delta_type = units_wrk[0] - year_ref = int(units_wrk[2]) - month_ref = int(units_wrk[3]) - day_ref = int(units_wrk[4]) - hour_ref = int(units_wrk[5]) - min_ref = int(units_wrk[6]) - sec_ref = int(units_wrk[7]) - # create datetime object for reference date - dateref = dt.datetime(year_ref,month_ref,day_ref,hour_ref,min_ref,sec_ref) - # create a datetime object for current time - if delta_type == 'seconds': - tag = dateref + 
dt.timedelta(seconds=time) - return tag + def create_tag(self,time, timeunits): + ''' create a datetime object from reference date and ocean_time''' + # ugly part to get reference date from units string + units_wrk = timeunits.replace(':',' ').replace('-',' ').split() + delta_type = units_wrk[0] + year_ref = int(units_wrk[2]) + month_ref = int(units_wrk[3]) + day_ref = int(units_wrk[4]) + hour_ref = int(units_wrk[5]) + min_ref = int(units_wrk[6]) + sec_ref = int(units_wrk[7]) + # create datetime object for reference date + dateref = dt.datetime(year_ref,month_ref,day_ref,hour_ref,min_ref,sec_ref) + # create a datetime object for current time + if delta_type == 'seconds': + tag = dateref + dt.timedelta(seconds=time) + return tag - def create_new_filename(self,tag): - ''' based on tag, generate a new filename ''' - # get rid of full path (if any) - filein = self.filein.replace('/',' ').split()[-1] - # get the pieces we want to keep in filename - filein_wrk = filein.replace('_',' ').split() - runname = filein_wrk[0] - filetype = filein_wrk[1] - # write our new filename - self.fileout = runname + '_' + filetype + '_' + tag.isoformat() + '.nc' - return None + def create_new_filename(self,tag): + ''' based on tag, generate a new filename ''' + # get rid of full path (if any) + filein = self.filein.replace('/',' ').split()[-1] + # get the pieces we want to keep in filename + filein_wrk = filein.replace('_',' ').split() + runname = filein_wrk[0] + filetype = filein_wrk[1] + # write our new filename + self.fileout = runname + '_' + filetype + '_' + tag.isoformat() + '.nc' + self.fileout = self.fileout.replace(':00:00','') + return None - def rename_file(self): - ''' call unix command mv ''' - # remove filein from full path - wrk = self.filein.replace('/',' ').split()[0:-1] - # re-create path - path = '.' - for part in wrk: - path = path + '/' + part - # rename file - subprocess.call('mv ' + self.filein + ' ' + path + '/' + self.fileout, shell=True) - return None + def rename_file(self): + ''' call unix command mv ''' + # remove filein from full path + wrk = self.filein.replace('/',' ').split()[0:-1] + # re-create path + path = '.' + for part in wrk: + path = path + '/' + part + # rename file + subprocess.call('mv ' + self.filein + ' ' + path + '/' + self.fileout, shell=True) + return None #---------------------------------------------------------------------------------------------- @@ -85,6 +86,7 @@ def rename_file(self): lis = lis.split() for file in lis: + file = str(file).replace("b'","").replace("'","") print(file) mytag = tagfile(file) mytag() diff --git a/examples/stuff/restart.py b/examples/stuff/restart.py index 559d8a6..616dd5e 100644 --- a/examples/stuff/restart.py +++ b/examples/stuff/restart.py @@ -2,10 +2,10 @@ import os.path import netCDF4 -num = raw_input("Restart number? ") +num = input("Restart number? 
") file_path = "$ARCHIVE/Arctic2/run45/restart_" + num if os.path.exists(file_path): - print "directory exists already:", file_path + print("directory exists already:", file_path) exit() else: cmd = "mkdir " + file_path @@ -16,7 +16,7 @@ fh = netCDF4.Dataset("arctic2_flt.nc", "r") nt = len(fh.dimensions['ocean_time']) nt = str(nt-1) -print nt +print(nt) fh.close() cmd = "mv -i arctic2_flt.nc arctic2_flt_" + num + ".nc" subprocess.call([cmd], shell=True) diff --git a/examples/stuff/slice.py b/examples/stuff/slice.py index 387076e..06eba7b 100644 --- a/examples/stuff/slice.py +++ b/examples/stuff/slice.py @@ -2,7 +2,7 @@ import netCDF4 import os import sys -import commands +import subprocess #import Ngl import pyroms from pyroms_toolbox import jday2date @@ -16,8 +16,8 @@ #for year in lst_year: # year = np.str(year) -#lst = commands.getoutput('ls averages/*74??.nc') -lst = commands.getoutput('ls months/*.nc') +#lst = subprocess.getoutput('ls averages/*74??.nc') +lst = subprocess.getoutput('ls months/*.nc') lst = lst.split() lst_file = lst_file + lst @@ -26,103 +26,20 @@ #clat = grd.hgrid.lat_rho #clon = grd.hgrid.lon_rho -#******************************************** -#wks = Ngl.open_wks("ncgm", "aice") # open workstation -#cmap = ["white", "black", \ -# "(/0., 0.,.85/)", "(/ 0., 0.,1./)", "(/.30,.30,1./)", \ -# "(/.42,.42,1./)", "(/.55,.55,1./)", \ -# "(/.64,.64,1./)", "(/.72,.72,1./)", "(/.80,.80,1./)", \ -# "(/.88,.88,1./)", "(/.9, .9, .9/)", "burlywood"] - -#rlist = Ngl.Resources() -#rlist.wkColorMap = cmap -#Ngl.set_values(wks, rlist) - -#ncolors = len(cmap) - 3 -#print ncolors -#; gsn_define_colormap(wks, cmap) -# -#zmin = 0.0 -#zmax = 1.0 -#cnLevels = np.zeros(ncolors-1) -#for i in range(ncolors-1): -# cnLevels[i] = zmin + (zmax-zmin)*(i+1)/ncolors -#spcng = (zmax-zmin)/ncolors - -# -#; Names is used when drawing the labelbar -#; names = new(dimsizes(cnLevels), string) -#; do i=0,dimsizes(cnLevels)-1 -#; names(i) = sprintf("%5.1f", doubletofloat(cnLevels(i))) -#; end do - -#i = NhlNewColor(wks,0.8,0.8,0.8) #; add gray to colormap - -#res = Ngl.Resources() #; plot mods desired -#res.nglMaximize = True -#res.sfXArray = clon -#res.sfYArray = clat -# -#res.cnFillOn = True #; color fill -#res.cnLinesOn = True #; no contour lines -#res.cnLineLabelsOn = False # ; no contour labels -#res.cnFillDrawOrder = "PreDraw" # ; put continents on top -# -##res.gsnSpreadColors = True # ; use total colormap -##res.gsnSpreadColorEnd = -3 -## res.cnInfoLabelOn = False ; no contour info label -#res.cnLevelSelectionMode = "ManualLevels" -#res.cnMinLevelValF = cnLevels[0] -#res.cnMaxLevelValF = cnLevels[ncolors-2] -#res.cnLevelSpacingF = spcng - -## Chukchi plot parameters -#res.mpProjection = "Stereographic" -#res.mpCenterLatF = 90 -#res.mpCenterLonF = 205 -# -#res.mpLimitMode = "Corners" -#res.mpLeftCornerLatF = 60.0 -#res.mpLeftCornerLonF = 180.0 -#res.mpRightCornerLatF = 74. -#res.mpRightCornerLonF = 250.0 -#res.mpDataBaseVersion = "MediumRes" -# -#res.mpFillOn = True -#res.mpFillColors = ["background","transparent","burlywood","transparent"] -# -#res.nglFrame = False # we can attach some text. 
-#res.tiMainString = "ROMS Arctic Simulation" -#res.tiMainOffsetYF = 0.04 -#res.tiMainFontHeightF = 0.02 -#res.nglSpreadColorEnd = -2 -#res.lbOrientation = "Horizontal" -#res.lbTitleString = "Sea Ice Concentration" -#res.lbTitleFontHeightF = 0.012 -#res.lbLabelFontHeightF = 0.015 -#res.pmLabelBarOrthogonalPosF = +0.02 -#res.pmLabelBarHeightF = 0.1 -#res.pmLabelBarWidthF = 0.6 -# -#txres = Ngl.Resources() # Text resources desired -#txres.txFontHeightF = 0.015 - -#txres.txFontColor = "OrangeRed" - istart = 356 jstart = 373 iend = 387 jend = 411 for file in lst_file: - print "Plotting "+file + print("Plotting "+file) nc = netCDF4.Dataset(file, "r") temp = nc.variables["temp"][0,:,:,:] time = nc.variables["ocean_time"][0] myday = jday2date(time/86400.) # date_tag = myday.strftime('%d %B %Y') date_tag = myday.strftime('%Y_%m_%d') - print date_tag + print(date_tag) plotout = date_tag + '.png' # plot = Ngl.contour_map(wks, aice, res) # Ngl.text_ndc(wks, date_tag, 0.85, 0.84, txres) diff --git a/pyroms/.doxygen b/pyroms/.doxygen new file mode 100644 index 0000000..b32a83a --- /dev/null +++ b/pyroms/.doxygen @@ -0,0 +1,1519 @@ +# Doxyfile 1.6.1 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = pycnal + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = YES + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. 
This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. 
+# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. 
For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = YES + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. 
+ +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the (brief and detailed) documentation of class members so that constructors and destructors are listed first. If set to NO (the default) the constructors will appear in the respective orders defined by SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. 
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if sectionname ... \endif.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or define consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and defines in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES = YES
+
+# If the sources in your project are distributed over multiple directories
+# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
+# in the documentation. The default is NO.
+
+SHOW_DIRECTORIES = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by
+# doxygen. The layout file controls the global structure of the generated output files
+# in an output format independent way. To create the layout file that represents
+# doxygen's defaults, run doxygen with the -l option. You can optionally specify a
+# file name after the option, if omitted DoxygenLayout.xml will be used as the name
+# of the layout file.
+
+LAYOUT_FILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE = doxygen.log
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT = bathy_smoother
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
+# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90
+
+FILE_PATTERNS =
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix filesystem feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
+# is applied to all files.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+ +INLINE_SOURCES = YES + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. 
+ +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = + +# If the HTML_TIMESTAMP tag is set to YES then the generated HTML +# documentation will contain the timesstamp. + +HTML_TIMESTAMP = NO + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = NO + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. 
+ +HHC_LOCATION = + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to YES, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). +# Windows users are probably better off using the HTML help feature. 
+ +GENERATE_TREEVIEW = YES + +# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list. + +USE_INLINE_TREES = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +# When the SEARCHENGINE tag is enable doxygen will generate a search box for the HTML output. The underlying search engine uses javascript +# and DHTML and should work on any modern browser. Note that when using HTML help (GENERATE_HTMLHELP) or Qt help (GENERATE_QHP) +# there is already a search function so this one should typically +# be disabled. + +SEARCHENGINE = YES + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = letter + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. 
+ +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. 
+ +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. 
+ +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = YES + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = YES + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. 
+ +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = NO + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = NO + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. 
Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = NO + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. 
+ +DOT_CLEANUP = YES diff --git a/pyroms/docs/api-objects.txt b/pyroms/docs/api-objects.txt deleted file mode 100644 index 1f1b061..0000000 --- a/pyroms/docs/api-objects.txt +++ /dev/null @@ -1,393 +0,0 @@ -pyroms pyroms-module.html -pyroms.cf pyroms.cf-module.html -pyroms.extern pyroms.extern-module.html -pyroms.extern.greatcircle pyroms.extern.greatcircle-module.html -pyroms.extern.greatcircle.vinc_dist pyroms.extern.greatcircle-module.html#vinc_dist -pyroms.extern.greatcircle.vinc_pt pyroms.extern.greatcircle-module.html#vinc_pt -pyroms.extern.kdtree pyroms.extern.kdtree-module.html -pyroms.extern.kdtree.minkowski_distance_p pyroms.extern.kdtree-module.html#minkowski_distance_p -pyroms.extern.kdtree.minkowski_distance pyroms.extern.kdtree-module.html#minkowski_distance -pyroms.extern.kdtree.distance_matrix pyroms.extern.kdtree-module.html#distance_matrix -pyroms.extern.pupynere pyroms.extern.pupynere-module.html -pyroms.extern.pupynere.ABSENT pyroms.extern.pupynere-module.html#ABSENT -pyroms.extern.pupynere.NC_DOUBLE pyroms.extern.pupynere-module.html#NC_DOUBLE -pyroms.extern.pupynere.NC_BYTE pyroms.extern.pupynere-module.html#NC_BYTE -pyroms.extern.pupynere.NC_ATTRIBUTE pyroms.extern.pupynere-module.html#NC_ATTRIBUTE -pyroms.extern.pupynere._test pyroms.extern.pupynere-module.html#_test -pyroms.extern.pupynere.NC_FLOAT pyroms.extern.pupynere-module.html#NC_FLOAT -pyroms.extern.pupynere.NC_INT pyroms.extern.pupynere-module.html#NC_INT -pyroms.extern.pupynere.ZERO pyroms.extern.pupynere-module.html#ZERO -pyroms.extern.pupynere.NC_VARIABLE pyroms.extern.pupynere-module.html#NC_VARIABLE -pyroms.extern.pupynere.NC_SHORT pyroms.extern.pupynere-module.html#NC_SHORT -pyroms.extern.pupynere.NC_DIMENSION pyroms.extern.pupynere-module.html#NC_DIMENSION -pyroms.extern.pupynere.NC_CHAR pyroms.extern.pupynere-module.html#NC_CHAR -pyroms.grid pyroms.grid-module.html -pyroms.grid.get_ROMS_vgrid pyroms.grid-module.html#get_ROMS_vgrid -pyroms.grid.uvp_masks pyroms.hgrid-module.html#uvp_masks -pyroms.grid.list_ROMS_gridid pyroms.grid-module.html#list_ROMS_gridid -pyroms.grid.rho_to_vert pyroms.hgrid-module.html#rho_to_vert -pyroms.grid.gridid_dictionary pyroms.grid-module.html#gridid_dictionary -pyroms.grid.write_ROMS_grid pyroms.grid-module.html#write_ROMS_grid -pyroms.grid.get_ROMS_grid pyroms.grid-module.html#get_ROMS_grid -pyroms.grid.get_ROMS_hgrid pyroms.grid-module.html#get_ROMS_hgrid -pyroms.grid.rho_to_vert_geo pyroms.hgrid-module.html#rho_to_vert_geo -pyroms.grid.print_ROMS_gridinfo pyroms.grid-module.html#print_ROMS_gridinfo -pyroms.grid.points_inside_poly pyroms.hgrid-module.html#points_inside_poly -pyroms.hgrid pyroms.hgrid-module.html -pyroms.hgrid.uvp_masks pyroms.hgrid-module.html#uvp_masks -pyroms.hgrid.rho_to_vert pyroms.hgrid-module.html#rho_to_vert -pyroms.hgrid._approximate_erf pyroms.hgrid-module.html#_approximate_erf -pyroms.hgrid.rho_to_vert_geo pyroms.hgrid-module.html#rho_to_vert_geo -pyroms.hgrid_old pyroms.hgrid_old-module.html -pyroms.hgrid_old.uvp_masks pyroms.hgrid_old-module.html#uvp_masks -pyroms.hgrid_old.rho_to_vert pyroms.hgrid_old-module.html#rho_to_vert -pyroms.hgrid_old._approximate_erf pyroms.hgrid_old-module.html#_approximate_erf -pyroms.hgrid_old.rho_to_vert_geo pyroms.hgrid_old-module.html#rho_to_vert_geo -pyroms.io pyroms.io-module.html -pyroms.io.Dataset pyroms.io-module.html#Dataset -pyroms.io.MFDataset pyroms.io-module.html#MFDataset -pyroms.remapping pyroms.remapping-module.html -pyroms.remapping.compute_remap_weights' 
[... remainder of the generated epydoc identifier map removed: entries mapping names under
 pyroms.remapping, pyroms.tools, pyroms.utility, pyroms.vgrid, pyroms.cf.time,
 pyroms.extern (greatcircle, kdtree, pupynere), pyroms.grid, pyroms.hgrid and
 pyroms.hgrid_old to their *-module.html / *-class.html pages ...]
diff --git a/pyroms/docs/class-tree.html b/pyroms/docs/class-tree.html
deleted file mode 100644
index 3f56bb7..0000000
--- a/pyroms/docs/class-tree.html
+++ /dev/null
@@ -1,222 +0,0 @@
[... 222 lines of the generated epydoc "Class Hierarchy" page removed ...]
diff --git a/pyroms/docs/crarr.png b/pyroms/docs/crarr.png
deleted file mode 100644
index 26b43c5..0000000
Binary files a/pyroms/docs/crarr.png and /dev/null differ
diff --git a/pyroms/docs/epydoc.css b/pyroms/docs/epydoc.css
deleted file mode 100644
index 86d4170..0000000
--- a/pyroms/docs/epydoc.css
+++ /dev/null
@@ -1,322 +0,0 @@
[... 322 lines of the generated epydoc stylesheet removed ...]
diff --git a/pyroms/docs/epydoc.js b/pyroms/docs/epydoc.js
deleted file mode 100644
index e787dbc..0000000
--- a/pyroms/docs/epydoc.js
+++ /dev/null
@@ -1,293 +0,0 @@
[... 293 lines of the generated epydoc JavaScript (show/hide private, frame toggling, source-block expand/collapse) removed ...]
diff --git a/pyroms/docs/frames.html b/pyroms/docs/frames.html
deleted file mode 100644
index 0baa165..0000000
--- a/pyroms/docs/frames.html
+++ /dev/null
@@ -1,17 +0,0 @@
[... 17 lines of the generated epydoc frameset page removed ...]
diff --git a/pyroms/docs/help.html b/pyroms/docs/help.html
deleted file mode 100644
index 9545651..0000000
--- a/pyroms/docs/help.html
+++ /dev/null
@@ -1,268 +0,0 @@
[... 268 lines of the generated epydoc help page removed (it describes the package, module and class pages, the trees and index pages, the frames-based table of contents and the navigation bar) ...]
diff --git a/pyroms/docs/identifier-index.html b/pyroms/docs/identifier-index.html
deleted file mode 100644
index 6bba8e5..0000000
--- a/pyroms/docs/identifier-index.html
+++ /dev/null
@@ -1,1183 +0,0 @@
[... 1183 lines of the generated epydoc identifier index (A-Z link tables) removed ...]
diff --git a/pyroms/docs/index.html b/pyroms/docs/index.html
deleted file mode 100644
index 0baa165..0000000
--- a/pyroms/docs/index.html
+++ /dev/null
@@ -1,17 +0,0 @@
[... 17 lines of the generated epydoc frameset entry page removed ...]
diff --git a/pyroms/docs/module-tree.html b/pyroms/docs/module-tree.html
deleted file mode 100644
index 408d60e..0000000
--- a/pyroms/docs/module-tree.html
+++ /dev/null
@@ -1,141 +0,0 @@
[... 141 lines of the generated epydoc "Module Hierarchy" page removed ...]
diff --git a/pyroms/docs/pyroms-module.html b/pyroms/docs/pyroms-module.html
deleted file mode 100644
index 5452296..0000000
--- a/pyroms/docs/pyroms-module.html
+++ /dev/null
@@ -1,172 +0,0 @@
[... 172 lines of the generated epydoc page for Package pyroms removed. Summary it carried:
 "PYROMS is a toolkit for working with ROMS ocean models. pyroms is based on the
 python/numpy/matplotlib scientific python suite. NetCDF I/O is based on the NetCDF4-python
 package. The toolkit contains general modeling tools for dealing with arrays, diagnosing
 standard properties, curvilinear grid generation, and interpolation."
 Version: 0.1.0. Author: Frederic Castruccio (frederic@marine.rutgers.edu) ...]
diff --git a/pyroms/docs/pyroms-pysrc.html b/pyroms/docs/pyroms-pysrc.html
deleted file mode 100644
index 970e513..0000000
--- a/pyroms/docs/pyroms-pysrc.html
+++ /dev/null
@@ -1,133 +0,0 @@
[... 133 lines of the generated "Source Code for Package pyroms" page removed: the rendered
 pyroms/__init__.py (module docstring; imports of cf, vgrid, extern, hgrid, grid, io, tools,
 remapping and utility; __authors__; __version__ = '0.1.0') ...]
diff --git a/pyroms/docs/pyroms.cf-module.html b/pyroms/docs/pyroms.cf-module.html
deleted file mode 100644
index 5925ab7..0000000
--- a/pyroms/docs/pyroms.cf-module.html
+++ /dev/null
@@ -1,145 +0,0 @@
[... 145 lines of the generated epydoc page for Module pyroms.cf removed. Summary it carried:
 "cf.py - classes around CF compliant files. The cf module is made for reading CF-compliant
 datasets, knowing data, its structure, units and conversions between units afterwards.
 Dependencies: numpy, netcdftime (packaged in netcdf4-python)."
 Classes: time - Return time object from netCDF file ...]
- - - - diff --git a/pyroms/docs/pyroms.cf-pysrc.html b/pyroms/docs/pyroms.cf-pysrc.html deleted file mode 100644 index 282c0c0..0000000 --- a/pyroms/docs/pyroms.cf-pysrc.html +++ /dev/null @@ -1,294 +0,0 @@ - - - - - pyroms.cf - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module cf - - - - - - -
[hide private]
[frames] | no frames]
-
-

Source Code for Module pyroms.cf

-
-  1  # encoding: utf-8 
-  2  """ 
-  3  cf.py - classes around CF compliant files 
-  4   
-  5  The cf module is made for reading CF-compliant datasets, 
-  6  knowing data, its structure, units and conversions 
-  7  between units afterwards. 
-  8   
-  9  Dependencies: 
- 10  ============= 
- 11  numpy 
- 12  netcdftime (packaged in netcdf4-python) 
- 13  """ 
- 14  __docformat__ = "restructuredtext en" 
- 15   
- 16  import numpy as np 
- 17   
- 18  import netcdftime 
- 19   
- 20  import pyroms.io 
- 21   
-
22 -class time (np.ndarray): -
23 """Return time object from netCDF file - 24 - 25 Parameters - 26 ---------- - 27 nc : netCDF3/4 object or filename - 28 Time information will be read from this netCDF3/4 file. - 29 name : string, optional - 30 The name of the the variable. - 31 units : string, optional - 32 The name of the variable units. - 33 calendar : string, optional - 34 A string representing the calandar to use. See netcdftime - 35 documentation for possible values. - 36 - 37 Returns - 38 ------- - 39 nctime : ndarray - 40 A subclass of numpy.ndarray with values equal to the time variable in - 41 the netCDF file referenced with nc. - 42 - 43 """ - 44 - 45 _unit2sec={'seconds' : 1.0, - 46 'minutes' : 60.0, - 47 'hours' : 3600.0, - 48 'days' : 3600.0*24.0, - 49 'weeks' : 3600.0*24.0*7.0, - 50 'years' : 3600.0*24.0*365.242198781} #ref to udunits - 51 - 52 _sec2unit={'seconds' : 1.0, - 53 'minutes' : 1.0/60.0, - 54 'hours' : 1.0/3600.0, - 55 'days' : 1.0/(24.0*3600.0)} - 56 -
57 - def __new__(self, ncfile, name='time', units=None, calendar='standard'): -
58 self._nc = pyroms.io.Dataset(ncfile) - 59 data = self._nc.variables[name][:] - 60 data = data.view(time) - 61 if units == None: - 62 units = self._nc.variables[name].units - 63 data.utime = netcdftime.utime(units, calendar=calendar) - 64 return data -
65 -
66 - def __array_finalize__(self, obj): -
67 self.utime = getattr(obj, 'utime', {}) -
68 -
69 - def arg_nearest_date(self, dateo): -
70 """Return index of date nearest to query date. - 71 - 72 Prameters - 73 --------- - 74 dateo : datetime object - 75 The query date - 76 - 77 Returns - 78 ------- - 79 idx : integer - 80 The index of the date closest to dateo. If two dates are - 81 equidistant, the smaller is returned. - 82 - 83 """ - 84 to = self.utime.date2num(dateo) - 85 return np.min(np.where(np.abs(self-to) == \ - 86 np.min(np.abs(self-to)))[0]) -
87 -
88 - def nearest_date(self, dateo): -
89 """Return the nearest date to query date. - 90 - 91 Prameters - 92 --------- - 93 dateo : datetime object - 94 The query date - 95 - 96 Returns - 97 ------- - 98 nearest_date : datetime object - 99 A datetime object of the date closest to dateo. If two dates are -100 equidistant, the smaller is returned. -101 -102 """ -103 idx = np.where(np.abs(self.dates-dateo) == \ -104 np.min(np.abs(self.dates-dateo)))[0] -105 idx = np.min(idx) -106 return self.dates[idx] -
107 -
108 - def arg_nearest(self, to, units=None): -
109 """Return index of time nearest to query time. -110 -111 Prameters -112 --------- -113 to : float -114 The query time. -115 units : string, optional -116 The units of the reference time. Defaults to the reference time -117 string 'units' in the netcdf oject. -118 -119 Returns -120 ------- -121 idx : integer -122 The index of the date closest to to. If two times are equidistant, -123 the smaller is returned. -124 -125 """ -126 if units is not None: -127 to *= self._unit2sec[units] * self._sec2unit[self.utime.units] -128 return np.min(np.where(np.abs(self-to) == np.min(np.abs(self-to)))[0]) -
129 -
130 - def nearest(self, to, units=None): -
131 """Return time nearest to time query. -132 -133 Prameters -134 --------- -135 to : float -136 The query time. -137 units : string, optional -138 The units of the reference time. Defaults to the reference time -139 string 'units' in the netcdf oject. -140 -141 Returns -142 ------- -143 idx : integer -144 The index of the date closest to to. If two times are equidistant, -145 the smaller is returned. -146 -147 """ -148 if units is not None: -149 to *= self._unit2sec[units] * self._sec2unit[self.utime.units] -150 idx = np.where(np.abs(self-to) == np.min(np.abs(self-to)))[0] -151 idx = np.min(idx) -152 return self[idx] -
153 -
154 - def get_seconds(self): -
155 fac = self._unit2sec[self.utime.units] * self._sec2unit['seconds'] -156 return self*fac -
157 -
158 - def get_minutes(self): -
159 fac = self._unit2sec[self.utime.units] * self._sec2unit['minutes'] -160 return self*fac -
161 -
162 - def get_hours(self): -
163 fac = self._unit2sec[self.utime.units] * self._sec2unit['hours'] -164 return self*fac -
165 -
166 - def get_days(self): -
167 fac = self._unit2sec[self.utime.units] * self._sec2unit['days'] -168 return np.asarray(self,dtype='float64')*fac -
169 -
170 - def get_jd(self): -
171 utime = netcdftime.utime('days since 0001-01-01 00:00:00', \ -172 calendar='proleptic_gregorian') -173 return utime.date2num(self.dates) -
174 -
175 - def get_dates(self): -
176 return np.array([self.utime.num2date(tval) for tval in self]) -
177 -178 jd = property(get_jd, None, doc="Julian day, for plotting in pylab") -179 seconds = property(get_seconds, None, doc="seconds") -180 minutes = property(get_minutes, None, doc="minutes") -181 hours = property(get_hours, None, doc="hours") -182 days = property(get_days, None, doc="days") -183 dates = property(get_dates, None, doc="datetime objects") -
184 -
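For reference, here is a minimal usage sketch of the time class listed above. The file name ('ocean_his.nc') and the variable name ('ocean_time') are placeholders, and it assumes the pyroms.cf module is importable as shown; it is not taken from the repository's examples.

    import datetime
    import pyroms

    # hypothetical history file and time variable name
    t = pyroms.cf.time('ocean_his.nc', name='ocean_time')

    print(t.days[:5])      # time values converted to days
    print(t.dates[0])      # first record as a datetime object

    # index and value of the record closest to a query date
    idx = t.arg_nearest_date(datetime.datetime(2005, 1, 15))
    print(idx, t.dates[idx])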
diff --git a/pyroms/docs/pyroms.cf.time-class.html b/pyroms/docs/pyroms.cf.time-class.html
deleted file mode 100644
index 6c42358..0000000
--- a/pyroms/docs/pyroms.cf.time-class.html
+++ /dev/null
@@ -1,1055 +0,0 @@
[Deleted epydoc-generated API page for pyroms.cf.time: class hierarchy (object -> numpy.ndarray -> time), instance method and property summaries, and the evaluated _unit2sec/_sec2unit class variables. Its docstrings duplicate the source listing above; navigation and table markup omitted.]
diff --git a/pyroms/docs/pyroms.extern-module.html b/pyroms/docs/pyroms.extern-module.html
deleted file mode 100644
index 72b9ce8..0000000
--- a/pyroms/docs/pyroms.extern-module.html
+++ /dev/null
@@ -1,132 +0,0 @@
[Deleted epydoc-generated package page for pyroms.extern ("External packages") listing its submodules; navigation markup omitted.]
diff --git a/pyroms/docs/pyroms.extern-pysrc.html b/pyroms/docs/pyroms.extern-pysrc.html
deleted file mode 100644
index 77e8cda..0000000
--- a/pyroms/docs/pyroms.extern-pysrc.html
+++ /dev/null
@@ -1,121 +0,0 @@
Source Code for Package pyroms.extern
'External packages'

# from an old version of Jeff Whitaker's Basemap
from greatcircle import GreatCircle

# from Roberto De Almeida <rob@pydap.org>
import pupynere

# from Anne M. Archibald's scipy.spatial.kdtree pure python code
from kdtree import KDTree
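Note that this __init__ uses implicit relative imports, which only work under Python 2; a Python 3 port of the package would need the explicit relative form, roughly:

    # Python 3 equivalent of the imports above (sketch)
    from .greatcircle import GreatCircle
    from . import pupynere
    from .kdtree import KDTree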
diff --git a/pyroms/docs/pyroms.extern.greatcircle-module.html b/pyroms/docs/pyroms.extern.greatcircle-module.html
deleted file mode 100644
index 9e339bc..0000000
--- a/pyroms/docs/pyroms.extern.greatcircle-module.html
+++ /dev/null
@@ -1,273 +0,0 @@
[Deleted epydoc-generated module page for pyroms.extern.greatcircle (version 1.0.1): summaries of the GreatCircle class and the vinc_dist/vinc_pt functions, duplicating the docstrings in the source listing below; navigation markup omitted.]
diff --git a/pyroms/docs/pyroms.extern.greatcircle-pysrc.html b/pyroms/docs/pyroms.extern.greatcircle-pysrc.html
deleted file mode 100644
index fe6f448..0000000
--- a/pyroms/docs/pyroms.extern.greatcircle-pysrc.html
+++ /dev/null
@@ -1,637 +0,0 @@
Source Code for Module pyroms.extern.greatcircle
-  1  import numpy as NX 
-  2  import math 
-  3   
-  4  __version__ = '1.0.1' 
-
5 -class GreatCircle(object): -
6 """ - 7 formula for perfect sphere from Ed Williams' 'Aviation Formulary' - 8 (http://williams.best.vwh.net/avform.htm) - 9 - 10 code for ellipsoid posted to GMT mailing list by Jim Leven in Dec 1999 - 11 - 12 Contact: Jeff Whitaker <jeffrey.s.whitaker@noaa.gov> - 13 """ - 14 - 15 -
16 - def __init__(self,rmajor,rminor,lon1,lat1,lon2,lat2): -
17 """ - 18 Define a great circle by specifying: - 19 rmajor - radius of major axis of ellipsoid - 20 rminor - radius of minor axis of ellipsoid. - 21 lon1 - starting longitude of great circle - 22 lat1 - starting latitude - 23 lon2 - ending longitude - 24 lat2 - ending latitude - 25 All must be given in degrees. - 26 - 27 Instance variables: - 28 distance - distance along great circle in radians. - 29 lon1,lat1,lon2,lat2 - start and end points (in radians). - 30 """ - 31 # convert to radians from degrees. - 32 lat1 = math.radians(lat1) - 33 lon1 = math.radians(lon1) - 34 lat2 = math.radians(lat2) - 35 lon2 = math.radians(lon2) - 36 self.a = rmajor - 37 self.f = (rmajor-rminor)/rmajor - 38 self.lat1 = lat1 - 39 self.lat2 = lat2 - 40 self.lon1 = lon1 - 41 self.lon2 = lon2 - 42 # distance along geodesic in meters. - 43 d,a12,a21 = vinc_dist(self.f, self.a, lat1, lon1, lat2, lon2 ) - 44 self.distance = d - 45 self.azimuth12 = a12 - 46 self.azimuth21 = a21 - 47 # great circle arc-length distance (in radians). - 48 self.gcarclen = 2.*math.asin(math.sqrt((math.sin((lat1-lat2)/2))**2+\ - 49 math.cos(lat1)*math.cos(lat2)*(math.sin((lon1-lon2)/2))**2)) - 50 # check to see if points are antipodal (if so, route is undefined). - 51 if self.gcarclen == math.pi: - 52 self.antipodal = True - 53 else: - 54 self.antipodal = False -
55 -
56 - def points(self,npoints): -
57 """ - 58 compute arrays of npoints equally spaced - 59 intermediate points along the great circle. - 60 - 61 input parameter npoints is the number of points - 62 to compute. - 63 - 64 Returns lons, lats (lists with longitudes and latitudes - 65 of intermediate points in degrees). - 66 - 67 For example npoints=10 will return arrays lons,lats of 10 - 68 equally spaced points along the great circle. - 69 """ - 70 # must ask for at least 2 points. - 71 if npoints <= 1: - 72 raise ValueError,'npoints must be greater than 1' - 73 elif npoints == 2: - 74 return [math.degrees(self.lon1),math.degrees(self.lon2)],[math.degrees(self.lat1),math.degrees(self.lat2)] - 75 # can't do it if endpoints are antipodal, since - 76 # route is undefined. - 77 if self.antipodal: - 78 raise ValueError,'cannot compute intermediate points on a great circle whose endpoints are antipodal' - 79 d = self.gcarclen - 80 delta = 1.0/(npoints-1) - 81 f = delta*NX.arange(npoints) # f=0 is point 1, f=1 is point 2. - 82 incdist = self.distance/(npoints-1) - 83 lat1 = self.lat1 - 84 lat2 = self.lat2 - 85 lon1 = self.lon1 - 86 lon2 = self.lon2 - 87 # perfect sphere, use great circle formula - 88 if self.f == 0.: - 89 A = NX.sin((1-f)*d)/math.sin(d) - 90 B = NX.sin(f*d)/math.sin(d) - 91 x = A*math.cos(lat1)*math.cos(lon1)+B*math.cos(lat2)*math.cos(lon2) - 92 y = A*math.cos(lat1)*math.sin(lon1)+B*math.cos(lat2)*math.sin(lon2) - 93 z = A*math.sin(lat1) +B*math.sin(lat2) - 94 lats=NX.arctan2(z,NX.sqrt(x**2+y**2)) - 95 lons=NX.arctan2(y,x) - 96 lons = map(math.degrees,lons.tolist()) - 97 lats = map(math.degrees,lats.tolist()) - 98 # use ellipsoid formulas - 99 else: -100 latpt = self.lat1 -101 lonpt = self.lon1 -102 azimuth = self.azimuth12 -103 lons = [math.degrees(lonpt)] -104 lats = [math.degrees(latpt)] -105 for n in range(npoints-2): -106 latptnew,lonptnew,alpha21=vinc_pt(self.f,self.a,latpt,lonpt,azimuth,incdist) -107 d,azimuth,a21=vinc_dist(self.f,self.a,latptnew,lonptnew,lat2,lon2) -108 lats.append(math.degrees(latptnew)) -109 lons.append(math.degrees(lonptnew)) -110 latpt = latptnew; lonpt = lonptnew -111 lons.append(math.degrees(self.lon2)) -112 lats.append(math.degrees(self.lat2)) -113 return lons,lats -
114 # -115 # --------------------------------------------------------------------- -116 # | | -117 # | geodetic.py - a collection of geodetic functions | -118 # | | -119 # --------------------------------------------------------------------- -120 # -121 # -122 # ---------------------------------------------------------------------- -123 # | Algrothims from Geocentric Datum of Australia Technical Manual | -124 # | | -125 # | http://www.anzlic.org.au/icsm/gdatum/chapter4.html | -126 # | | -127 # | This page last updated 11 May 1999 | -128 # | | -129 # | Computations on the Ellipsoid | -130 # | | -131 # | There are a number of formulae that are available | -132 # | to calculate accurate geodetic positions, | -133 # | azimuths and distances on the ellipsoid. | -134 # | | -135 # | Vincenty's formulae (Vincenty, 1975) may be used | -136 # | for lines ranging from a few cm to nearly 20,000 km, | -137 # | with millimetre accuracy. | -138 # | The formulae have been extensively tested | -139 # | for the Australian region, by comparison with results | -140 # | from other formulae (Rainsford, 1955 & Sodano, 1965). | -141 # | | -142 # | * Inverse problem: azimuth and distance from known | -143 # | latitudes and longitudes | -144 # | * Direct problem: Latitude and longitude from known | -145 # | position, azimuth and distance. | -146 # | * Sample data | -147 # | * Excel spreadsheet | -148 # | | -149 # | Vincenty's Inverse formulae | -150 # | Given: latitude and longitude of two points | -151 # | (phi1, lembda1 and phi2, lembda2), | -152 # | Calculate: the ellipsoidal distance (s) and | -153 # | forward and reverse azimuths between the points (alpha12, alpha21). | -154 # | | -155 # ---------------------------------------------------------------------- -156 -
157 -def vinc_dist( f, a, phi1, lembda1, phi2, lembda2 ) : -
158 """ -159 -160 Returns the distance between two geographic points on the ellipsoid -161 and the forward and reverse azimuths between these points. -162 lats, longs and azimuths are in radians, distance in metres -163 -164 Returns ( s, alpha12, alpha21 ) as a tuple -165 -166 """ -167 -168 if (abs( phi2 - phi1 ) < 1e-8) and ( abs( lembda2 - lembda1) < 1e-8 ) : -169 return 0.0, 0.0, 0.0 -170 -171 two_pi = 2.0*math.pi -172 -173 b = a * (1.0 - f) -174 -175 TanU1 = (1-f) * math.tan( phi1 ) -176 TanU2 = (1-f) * math.tan( phi2 ) -177 -178 U1 = math.atan(TanU1) -179 U2 = math.atan(TanU2) -180 -181 lembda = lembda2 - lembda1 -182 last_lembda = -4000000.0 # an impossibe value -183 omega = lembda -184 -185 # Iterate the following equations, -186 # until there is no significant change in lembda -187 -188 while ( last_lembda < -3000000.0 or lembda != 0 and abs( (last_lembda - lembda)/lembda) > 1.0e-9 ) : -189 -190 sqr_sin_sigma = pow( math.cos(U2) * math.sin(lembda), 2) + \ -191 pow( (math.cos(U1) * math.sin(U2) - \ -192 math.sin(U1) * math.cos(U2) * math.cos(lembda) ), 2 ) -193 -194 Sin_sigma = math.sqrt( sqr_sin_sigma ) -195 -196 Cos_sigma = math.sin(U1) * math.sin(U2) + math.cos(U1) * math.cos(U2) * math.cos(lembda) -197 -198 sigma = math.atan2( Sin_sigma, Cos_sigma ) -199 -200 Sin_alpha = math.cos(U1) * math.cos(U2) * math.sin(lembda) / math.sin(sigma) -201 alpha = math.asin( Sin_alpha ) -202 -203 Cos2sigma_m = math.cos(sigma) - (2 * math.sin(U1) * math.sin(U2) / pow(math.cos(alpha), 2) ) -204 -205 C = (f/16) * pow(math.cos(alpha), 2) * (4 + f * (4 - 3 * pow(math.cos(alpha), 2))) -206 -207 last_lembda = lembda -208 -209 lembda = omega + (1-C) * f * math.sin(alpha) * (sigma + C * math.sin(sigma) * \ -210 (Cos2sigma_m + C * math.cos(sigma) * (-1 + 2 * pow(Cos2sigma_m, 2) ))) -211 -212 -213 u2 = pow(math.cos(alpha),2) * (a*a-b*b) / (b*b) -214 -215 A = 1 + (u2/16384) * (4096 + u2 * (-768 + u2 * (320 - 175 * u2))) -216 -217 B = (u2/1024) * (256 + u2 * (-128+ u2 * (74 - 47 * u2))) -218 -219 delta_sigma = B * Sin_sigma * (Cos2sigma_m + (B/4) * \ -220 (Cos_sigma * (-1 + 2 * pow(Cos2sigma_m, 2) ) - \ -221 (B/6) * Cos2sigma_m * (-3 + 4 * sqr_sin_sigma) * \ -222 (-3 + 4 * pow(Cos2sigma_m,2 ) ))) -223 -224 s = b * A * (sigma - delta_sigma) -225 -226 alpha12 = math.atan2( (math.cos(U2) * math.sin(lembda)), \ -227 (math.cos(U1) * math.sin(U2) - math.sin(U1) * math.cos(U2) * math.cos(lembda))) -228 -229 alpha21 = math.atan2( (math.cos(U1) * math.sin(lembda)), \ -230 (-math.sin(U1) * math.cos(U2) + math.cos(U1) * math.sin(U2) * math.cos(lembda))) -231 -232 if ( alpha12 < 0.0 ) : -233 alpha12 = alpha12 + two_pi -234 if ( alpha12 > two_pi ) : -235 alpha12 = alpha12 - two_pi -236 -237 alpha21 = alpha21 + two_pi / 2.0 -238 if ( alpha21 < 0.0 ) : -239 alpha21 = alpha21 + two_pi -240 if ( alpha21 > two_pi ) : -241 alpha21 = alpha21 - two_pi -242 -243 return s, alpha12, alpha21 -
244 -245 # END of Vincenty's Inverse formulae -246 -247 -248 #---------------------------------------------------------------------------- -249 # Vincenty's Direct formulae | -250 # Given: latitude and longitude of a point (phi1, lembda1) and | -251 # the geodetic azimuth (alpha12) | -252 # and ellipsoidal distance in metres (s) to a second point, | -253 # | -254 # Calculate: the latitude and longitude of the second point (phi2, lembda2) | -255 # and the reverse azimuth (alpha21). | -256 # | -257 #---------------------------------------------------------------------------- -258 -
259 -def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : -
260 """ -261 -262 Returns the lat and long of projected point and reverse azimuth -263 given a reference point and a distance and azimuth to project. -264 lats, longs and azimuths are passed in decimal degrees -265 -266 Returns ( phi2, lambda2, alpha21 ) as a tuple -267 -268 """ -269 -270 -271 two_pi = 2.0*math.pi -272 -273 if ( alpha12 < 0.0 ) : -274 alpha12 = alpha12 + two_pi -275 if ( alpha12 > two_pi ) : -276 alpha12 = alpha12 - two_pi -277 -278 -279 b = a * (1.0 - f) -280 -281 TanU1 = (1-f) * math.tan(phi1) -282 U1 = math.atan( TanU1 ) -283 sigma1 = math.atan2( TanU1, math.cos(alpha12) ) -284 Sinalpha = math.cos(U1) * math.sin(alpha12) -285 cosalpha_sq = 1.0 - Sinalpha * Sinalpha -286 -287 u2 = cosalpha_sq * (a * a - b * b ) / (b * b) -288 A = 1.0 + (u2 / 16384) * (4096 + u2 * (-768 + u2 * \ -289 (320 - 175 * u2) ) ) -290 B = (u2 / 1024) * (256 + u2 * (-128 + u2 * (74 - 47 * u2) ) ) -291 -292 # Starting with the approximation -293 sigma = (s / (b * A)) -294 -295 last_sigma = 2.0 * sigma + 2.0 # something impossible -296 -297 # Iterate the following three equations -298 # until there is no significant change in sigma -299 -300 # two_sigma_m , delta_sigma -301 -302 while ( abs( (last_sigma - sigma) / sigma) > 1.0e-9 ) : -303 -304 two_sigma_m = 2 * sigma1 + sigma -305 -306 delta_sigma = B * math.sin(sigma) * ( math.cos(two_sigma_m) \ -307 + (B/4) * (math.cos(sigma) * \ -308 (-1 + 2 * math.pow( math.cos(two_sigma_m), 2 ) - \ -309 (B/6) * math.cos(two_sigma_m) * \ -310 (-3 + 4 * math.pow(math.sin(sigma), 2 )) * \ -311 (-3 + 4 * math.pow( math.cos (two_sigma_m), 2 ))))) \ -312 -313 last_sigma = sigma -314 sigma = (s / (b * A)) + delta_sigma -315 -316 -317 phi2 = math.atan2 ( (math.sin(U1) * math.cos(sigma) + math.cos(U1) * math.sin(sigma) * math.cos(alpha12) ), \ -318 ((1-f) * math.sqrt( math.pow(Sinalpha, 2) + \ -319 pow(math.sin(U1) * math.sin(sigma) - math.cos(U1) * math.cos(sigma) * math.cos(alpha12), 2)))) -320 -321 -322 lembda = math.atan2( (math.sin(sigma) * math.sin(alpha12 )), (math.cos(U1) * math.cos(sigma) - \ -323 math.sin(U1) * math.sin(sigma) * math.cos(alpha12))) -324 -325 C = (f/16) * cosalpha_sq * (4 + f * (4 - 3 * cosalpha_sq )) -326 -327 omega = lembda - (1-C) * f * Sinalpha * \ -328 (sigma + C * math.sin(sigma) * (math.cos(two_sigma_m) + \ -329 C * math.cos(sigma) * (-1 + 2 * math.pow(math.cos(two_sigma_m),2) ))) -330 -331 lembda2 = lembda1 + omega -332 -333 alpha21 = math.atan2 ( Sinalpha, (-math.sin(U1) * math.sin(sigma) + \ -334 math.cos(U1) * math.cos(sigma) * math.cos(alpha12))) -335 -336 alpha21 = alpha21 + two_pi / 2.0 -337 if ( alpha21 < 0.0 ) : -338 alpha21 = alpha21 + two_pi -339 if ( alpha21 > two_pi ) : -340 alpha21 = alpha21 - two_pi -341 -342 -343 return phi2, lembda2, alpha21 -
344 -345 # END of Vincenty's Direct formulae -346 -347 ##--------------------------------------------------------------------------- -348 # Notes: -349 # -350 # * "The inverse formulae may give no solution over a line -351 # between two nearly antipodal points. This will occur when -352 # lembda ... is greater than pi in absolute value". (Vincenty, 1975) -353 # -354 # * In Vincenty (1975) L is used for the difference in longitude, -355 # however for consistency with other formulae in this Manual, -356 # omega is used here. -357 # -358 # * Variables specific to Vincenty's formulae are shown below, -359 # others common throughout the manual are shown in the Glossary. -360 # -361 # -362 # alpha = Azimuth of the geodesic at the equator -363 # U = Reduced latitude -364 # lembda = Difference in longitude on an auxiliary sphere (lembda1 & lembda2 -365 # are the geodetic longitudes of points 1 & 2) -366 # sigma = Angular distance on a sphere, from point 1 to point 2 -367 # sigma1 = Angular distance on a sphere, from the equator to point 1 -368 # sigma2 = Angular distance on a sphere, from the equator to point 2 -369 # sigma_m = Angular distance on a sphere, from the equator to the -370 # midpoint of the line from point 1 to point 2 -371 # u, A, B, C = Internal variables -372 # -373 # -374 # Sample Data -375 # -376 # Flinders Peak -377 # -37o57'03.72030" -378 # 144o25'29.52440" -379 # Buninyong -380 # -37o39'10.15610" -381 # 143o55'35.38390" -382 # Ellipsoidal Distance -383 # 54,972.271 m -384 # -385 # Forward Azimuth -386 # 306o52'05.37" -387 # -388 # Reverse Azimuth -389 # 127o10'25.07" -390 # -391 # -392 ##******************************************************************* -393 -394 # Test driver -395 -396 if __name__ == "__main__" : -397 -398 # WGS84 -399 -400 a = 6378137.0 -401 b = 6356752.3142 -402 f = (a-b)/a -403 -404 print "\n Ellipsoidal major axis = %12.3f metres\n" % ( a ) -405 print "\n Inverse flattening = %15.9f\n" % ( 1.0/f ) -406 -407 print "\n Test Flinders Peak to Buninyon" -408 print "\n ****************************** \n" -409 phi1 = -(( 3.7203 / 60. + 57) / 60. + 37 ) -410 lembda1 = ( 29.5244 / 60. + 25) / 60. + 144 -411 print "\n Flinders Peak = %12.6f, %13.6f \n" % ( phi1, lembda1 ) -412 deg = int(phi1) -413 minn = int(abs( ( phi1 - deg) * 60.0 )) -414 sec = abs(phi1 * 3600 - deg * 3600) - minn * 60 -415 print " Flinders Peak = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), -416 deg = int(lembda1) -417 minn = int(abs( ( lembda1 - deg) * 60.0 )) -418 sec = abs(lembda1 * 3600 - deg * 3600) - minn * 60 -419 print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec ) -420 -421 phi2 = -(( 10.1561 / 60. + 39) / 60. + 37 ) -422 lembda2 = ( 35.3839 / 60. + 55) / 60. 
+ 143 -423 print "\n Buninyon = %12.6f, %13.6f \n" % ( phi2, lembda2 ) -424 -425 deg = int(phi2) -426 minn = int(abs( ( phi2 - deg) * 60.0 )) -427 sec = abs(phi2 * 3600 - deg * 3600) - minn * 60 -428 print " Buninyon = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), -429 deg = int(lembda2) -430 minn = int(abs( ( lembda2 - deg) * 60.0 )) -431 sec = abs(lembda2 * 3600 - deg * 3600) - minn * 60 -432 print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec ) -433 -434 dist, alpha12, alpha21 = vinc_dist ( f, a, math.radians(phi1), math.radians(lembda1), math.radians(phi2), math.radians(lembda2) ) -435 -436 alpha12 = math.degrees(alpha12) -437 alpha21 = math.degrees(alpha21) -438 -439 print "\n Ellipsoidal Distance = %15.3f metres\n should be 54972.271 m\n" % ( dist ) -440 print "\n Forward and back azimuths = %15.6f, %15.6f \n" % ( alpha12, alpha21 ) -441 deg = int(alpha12) -442 minn =int( abs(( alpha12 - deg) * 60.0 ) ) -443 sec = abs(alpha12 * 3600 - deg * 3600) - minn * 60 -444 print " Forward azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) -445 deg = int(alpha21) -446 minn =int(abs( ( alpha21 - deg) * 60.0 )) -447 sec = abs(alpha21 * 3600 - deg * 3600) - minn * 60 -448 print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) -449 -450 -451 # Test the direct function */ -452 phi1 = -(( 3.7203 / 60. + 57) / 60. + 37 ) -453 lembda1 = ( 29.5244 / 60. + 25) / 60. + 144 -454 dist = 54972.271 -455 alpha12 = ( 5.37 / 60. + 52) / 60. + 306 -456 phi2 = lembda2 = 0.0 -457 alpha21 = 0.0 -458 -459 phi2, lembda2, alpha21 = vinc_pt ( f, a, math.radians(phi1), math.radians(lembda1), math.radians(alpha12), dist ) -460 -461 phi2 = math.degrees(phi2) -462 lembda2 = math.degrees(lembda2) -463 alpha21 = math.degrees(alpha21) -464 -465 print "\n Projected point =%11.6f, %13.6f \n" % ( phi2, lembda2 ) -466 deg = int(phi2) -467 minn =int(abs( ( phi2 - deg) * 60.0 )) -468 sec = abs( phi2 * 3600 - deg * 3600) - minn * 60 -469 print " Projected Point = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), -470 deg = int(lembda2) -471 minn =int(abs( ( lembda2 - deg) * 60.0 )) -472 sec = abs(lembda2 * 3600 - deg * 3600) - minn * 60 -473 print " %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) -474 print " Should be Buninyon \n" -475 print "\n Reverse azimuth = %10.6f \n" % ( alpha21 ) -476 deg = int(alpha21) -477 minn =int(abs( ( alpha21 - deg) * 60.0 )) -478 sec = abs(alpha21 * 3600 - deg * 3600) - minn * 60 -479 print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n\n" % ( deg, minn, sec ) -480 -481 # lat/lon of New York -482 lat1 = 40.78 -483 lon1 = -73.98 -484 # lat/lon of London. -485 lat2 = 51.53 -486 lon2 = 0.08 -487 print 'New York to London:' -488 gc = GreatCircle((2*a+b)/3.,(2*a+b)/3.,lon1,lat1,lon2,lat2) -489 print 'geodesic distance using a sphere with WGS84 mean radius = ',gc.distance -490 print 'lon/lat for 10 equally spaced points along geodesic:' -491 lons,lats = gc.points(10) -492 for lon,lat in zip(lons,lats): -493 print lon,lat -494 gc = GreatCircle(a,b,lon1,lat1,lon2,lat2) -495 print 'geodesic distance using WGS84 ellipsoid = ',gc.distance -496 print 'lon/lat for 10 equally spaced points along geodesic:' -497 lons,lats = gc.points(10) -498 for lon,lat in zip(lons,lats): -499 print lon,lat -500 -
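To make the deleted module above easier to follow, here is a hedged usage sketch (Python 2 era, as the module was written). The New York/London coordinates mirror the test driver at the end of the listing; the import path assumes the package layout shown and is not verified against an installed copy.

    import math
    from pyroms.extern.greatcircle import GreatCircle, vinc_dist

    a, b = 6378137.0, 6356752.3142        # WGS84 semi-major/minor axes (m)
    f = (a - b) / a                       # flattening

    # geodesic from New York to London on the WGS84 ellipsoid
    gc = GreatCircle(a, b, -73.98, 40.78, 0.08, 51.53)
    print(gc.distance)                    # distance in metres
    lons, lats = gc.points(10)            # 10 points along the geodesic

    # distance and forward/back azimuths from Vincenty's inverse formula
    s, az12, az21 = vinc_dist(f, a,
                              math.radians(40.78), math.radians(-73.98),
                              math.radians(51.53), math.radians(0.08))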
diff --git a/pyroms/docs/pyroms.extern.greatcircle.GreatCircle-class.html b/pyroms/docs/pyroms.extern.greatcircle.GreatCircle-class.html
deleted file mode 100644
index 957ae34..0000000
--- a/pyroms/docs/pyroms.extern.greatcircle.GreatCircle-class.html
+++ /dev/null
@@ -1,299 +0,0 @@
[Deleted epydoc-generated API page for the GreatCircle class: class hierarchy and __init__/points method details, duplicating the docstrings in the source listing above; navigation markup omitted.]
diff --git a/pyroms/docs/pyroms.extern.kdtree-module.html b/pyroms/docs/pyroms.extern.kdtree-module.html
deleted file mode 100644
index 594399c..0000000
--- a/pyroms/docs/pyroms.extern.kdtree-module.html
+++ /dev/null
@@ -1,301 +0,0 @@
[Deleted epydoc-generated module page for pyroms.extern.kdtree: summaries of the Rectangle and KDTree classes and the minkowski_distance_p, minkowski_distance and distance_matrix functions, duplicating the docstrings in the source listing below; navigation markup omitted.]
diff --git a/pyroms/docs/pyroms.extern.kdtree-pysrc.html b/pyroms/docs/pyroms.extern.kdtree-pysrc.html
deleted file mode 100644
index 7f7a377..0000000
--- a/pyroms/docs/pyroms.extern.kdtree-pysrc.html
+++ /dev/null
@@ -1,968 +0,0 @@
Source Code for Module pyroms.extern.kdtree
-  1  # Copyright Anne M. Archibald 2008 
-  2  # Released under the scipy license 
-  3  import numpy as np 
-  4  from heapq import heappush, heappop 
-  5  #  import scipy.sparse  ### removed method that uses this -rdh 
-  6   
-
7 -def minkowski_distance_p(x,y,p=2): -
8 """Compute the pth power of the L**p distance between x and y - 9 - 10 For efficiency, this function computes the L**p distance but does - 11 not extract the pth root. If p is 1 or infinity, this is equal to - 12 the actual L**p distance. - 13 """ - 14 x = np.asarray(x) - 15 y = np.asarray(y) - 16 if p==np.inf: - 17 return np.amax(np.abs(y-x),axis=-1) - 18 elif p==1: - 19 return np.sum(np.abs(y-x),axis=-1) - 20 else: - 21 return np.sum(np.abs(y-x)**p,axis=-1) -
22 -def minkowski_distance(x,y,p=2): -
23 """Compute the L**p distance between x and y""" - 24 x = np.asarray(x) - 25 y = np.asarray(y) - 26 if p==np.inf or p==1: - 27 return minkowski_distance_p(x,y,p) - 28 else: - 29 return minkowski_distance_p(x,y,p)**(1./p) -
30 -
31 -class Rectangle(object): -
32 """Hyperrectangle class. - 33 - 34 Represents a Cartesian product of intervals. - 35 """ -
36 - def __init__(self, maxes, mins): -
37 """Construct a hyperrectangle.""" - 38 self.maxes = np.maximum(maxes,mins).astype(np.float) - 39 self.mins = np.minimum(maxes,mins).astype(np.float) - 40 self.m, = self.maxes.shape -
41 -
42 - def __repr__(self): -
43 return "<Rectangle %s>" % zip(self.mins, self.maxes) -
44 -
45 - def volume(self): -
46 """Total volume.""" - 47 return np.prod(self.maxes-self.mins) -
48 -
49 - def split(self, d, split): -
50 """Produce two hyperrectangles by splitting along axis d. - 51 - 52 In general, if you need to compute maximum and minimum - 53 distances to the children, it can be done more efficiently - 54 by updating the maximum and minimum distances to the parent. - 55 """ # FIXME: do this - 56 mid = np.copy(self.maxes) - 57 mid[d] = split - 58 less = Rectangle(self.mins, mid) - 59 mid = np.copy(self.mins) - 60 mid[d] = split - 61 greater = Rectangle(mid, self.maxes) - 62 return less, greater -
63 -
64 - def min_distance_point(self, x, p=2.): -
65 """Compute the minimum distance between x and a point in the hyperrectangle.""" - 66 return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-x,x-self.maxes)),p) -
67 -
68 - def max_distance_point(self, x, p=2.): -
69 """Compute the maximum distance between x and a point in the hyperrectangle.""" - 70 return minkowski_distance(0, np.maximum(self.maxes-x,x-self.mins),p) -
71 -
72 - def min_distance_rectangle(self, other, p=2.): -
73 """Compute the minimum distance between points in the two hyperrectangles.""" - 74 return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-other.maxes,other.mins-self.maxes)),p) -
75 -
76 - def max_distance_rectangle(self, other, p=2.): -
77 """Compute the maximum distance between points in the two hyperrectangles.""" - 78 return minkowski_distance(0, np.maximum(self.maxes-other.mins,other.maxes-self.mins),p) -
79 - 80 -
81 -class KDTree(object): -
82 """kd-tree for quick nearest-neighbor lookup - 83 - 84 This class provides an index into a set of k-dimensional points - 85 which can be used to rapidly look up the nearest neighbors of any - 86 point. - 87 - 88 The algorithm used is described in Maneewongvatana and Mount 1999. - 89 The general idea is that the kd-tree is a binary trie, each of whose - 90 nodes represents an axis-aligned hyperrectangle. Each node specifies - 91 an axis and splits the set of points based on whether their coordinate - 92 along that axis is greater than or less than a particular value. - 93 - 94 During construction, the axis and splitting point are chosen by the - 95 "sliding midpoint" rule, which ensures that the cells do not all - 96 become long and thin. - 97 - 98 The tree can be queried for the r closest neighbors of any given point - 99 (optionally returning only those within some maximum distance of the -100 point). It can also be queried, with a substantial gain in efficiency, -101 for the r approximate closest neighbors. -102 -103 For large dimensions (20 is already large) do not expect this to run -104 significantly faster than brute force. High-dimensional nearest-neighbor -105 queries are a substantial open problem in computer science. -106 -107 The tree also supports all-neighbors queries, both with arrays of points -108 and with other kd-trees. These do use a reasonably efficient algorithm, -109 but the kd-tree is not necessarily the best data structure for this -110 sort of calculation. -111 """ -112 -
113 - def __init__(self, data, leafsize=10): -
114 """Construct a kd-tree. -115 -116 Parameters: -117 =========== -118 -119 data : array-like, shape (n,k) -120 The data points to be indexed. This array is not copied, and -121 so modifying this data will result in bogus results. -122 leafsize : positive integer -123 The number of points at which the algorithm switches over to -124 brute-force. -125 """ -126 self.data = np.asarray(data) -127 self.n, self.m = np.shape(self.data) -128 self.leafsize = int(leafsize) -129 if self.leafsize<1: -130 raise ValueError("leafsize must be at least 1") -131 self.maxes = np.amax(self.data,axis=0) -132 self.mins = np.amin(self.data,axis=0) -133 -134 self.tree = self.__build(np.arange(self.n), self.maxes, self.mins) -
135 -
136 - class node(object): -
137 pass -
138 - class leafnode(node): -
139 - def __init__(self, idx): -
140 self.idx = idx -141 self.children = len(idx) -
142 - class innernode(node): -
143 - def __init__(self, split_dim, split, less, greater): -
144 self.split_dim = split_dim -145 self.split = split -146 self.less = less -147 self.greater = greater -148 self.children = less.children+greater.children -
149 -
150 - def __build(self, idx, maxes, mins): -
151 if len(idx)<=self.leafsize: -152 return KDTree.leafnode(idx) -153 else: -154 data = self.data[idx] -155 #maxes = np.amax(data,axis=0) -156 #mins = np.amin(data,axis=0) -157 d = np.argmax(maxes-mins) -158 maxval = maxes[d] -159 minval = mins[d] -160 if maxval==minval: -161 # all points are identical; warn user? -162 return KDTree.leafnode(idx) -163 data = data[:,d] -164 -165 # sliding midpoint rule; see Maneewongvatana and Mount 1999 -166 # for arguments that this is a good idea. -167 split = (maxval+minval)/2 -168 less_idx = np.nonzero(data<=split)[0] -169 greater_idx = np.nonzero(data>split)[0] -170 if len(less_idx)==0: -171 split = np.amin(data) -172 less_idx = np.nonzero(data<=split)[0] -173 greater_idx = np.nonzero(data>split)[0] -174 if len(greater_idx)==0: -175 split = np.amax(data) -176 less_idx = np.nonzero(data<split)[0] -177 greater_idx = np.nonzero(data>=split)[0] -178 if len(less_idx)==0: -179 # _still_ zero? all must have the same value -180 assert np.all(data==data[0]), "Troublesome data array: %s" % data -181 split = data[0] -182 less_idx = np.arange(len(data)-1) -183 greater_idx = np.array([len(data)-1]) -184 -185 lessmaxes = np.copy(maxes) -186 lessmaxes[d] = split -187 greatermins = np.copy(mins) -188 greatermins[d] = split -189 return KDTree.innernode(d, split, -190 self.__build(idx[less_idx],lessmaxes,mins), -191 self.__build(idx[greater_idx],maxes,greatermins)) -
192 -
193 - def __query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf): -
194 -195 side_distances = np.maximum(0,np.maximum(x-self.maxes,self.mins-x)) -196 if p!=np.inf: -197 side_distances**=p -198 min_distance = np.sum(side_distances) -199 else: -200 min_distance = np.amax(side_distances) -201 -202 # priority queue for chasing nodes -203 # entries are: -204 # minimum distance between the cell and the target -205 # distances between the nearest side of the cell and the target -206 # the head node of the cell -207 q = [(min_distance, -208 tuple(side_distances), -209 self.tree)] -210 # priority queue for the nearest neighbors -211 # furthest known neighbor first -212 # entries are (-distance**p, i) -213 neighbors = [] -214 -215 if eps==0: -216 epsfac=1 -217 elif p==np.inf: -218 epsfac = 1/(1+eps) -219 else: -220 epsfac = 1/(1+eps)**p -221 -222 if p!=np.inf and distance_upper_bound!=np.inf: -223 distance_upper_bound = distance_upper_bound**p -224 -225 while q: -226 min_distance, side_distances, node = heappop(q) -227 if isinstance(node, KDTree.leafnode): -228 # brute-force -229 data = self.data[node.idx] -230 ds = minkowski_distance_p(data,x[np.newaxis,:],p) -231 for i in range(len(ds)): -232 if ds[i]<distance_upper_bound: -233 if len(neighbors)==k: -234 heappop(neighbors) -235 heappush(neighbors, (-ds[i], node.idx[i])) -236 if len(neighbors)==k: -237 distance_upper_bound = -neighbors[0][0] -238 else: -239 # we don't push cells that are too far onto the queue at all, -240 # but since the distance_upper_bound decreases, we might get -241 # here even if the cell's too far -242 if min_distance>distance_upper_bound*epsfac: -243 # since this is the nearest cell, we're done, bail out -244 break -245 # compute minimum distances to the children and push them on -246 if x[node.split_dim]<node.split: -247 near, far = node.less, node.greater -248 else: -249 near, far = node.greater, node.less -250 -251 # near child is at the same distance as the current node -252 heappush(q,(min_distance, side_distances, near)) -253 -254 # far child is further by an amount depending only -255 # on the split value -256 sd = list(side_distances) -257 if p == np.inf: -258 min_distance = max(min_distance, abs(node.split-x[node.split_dim])) -259 elif p == 1: -260 sd[node.split_dim] = np.abs(node.split-x[node.split_dim]) -261 min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim] -262 else: -263 sd[node.split_dim] = np.abs(node.split-x[node.split_dim])**p -264 min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim] -265 -266 # far child might be too far, if so, don't bother pushing it -267 if min_distance<=distance_upper_bound*epsfac: -268 heappush(q,(min_distance, tuple(sd), far)) -269 -270 if p==np.inf: -271 return sorted([(-d,i) for (d,i) in neighbors]) -272 else: -273 return sorted([((-d)**(1./p),i) for (d,i) in neighbors]) -
274 -
275 - def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf): -
276 """query the kd-tree for nearest neighbors -277 -278 Parameters: -279 =========== -280 -281 x : array-like, last dimension self.m -282 An array of points to query. -283 k : integer -284 The number of nearest neighbors to return. -285 eps : nonnegative float -286 Return approximate nearest neighbors; the kth returned value -287 is guaranteed to be no further than (1+eps) times the -288 distance to the real kth nearest neighbor. -289 p : float, 1<=p<=infinity -290 Which Minkowski p-norm to use. -291 1 is the sum-of-absolute-values "Manhattan" distance -292 2 is the usual Euclidean distance -293 infinity is the maximum-coordinate-difference distance -294 distance_upper_bound : nonnegative float -295 Return only neighbors within this distance. This is used to prune -296 tree searches, so if you are doing a series of nearest-neighbor -297 queries, it may help to supply the distance to the nearest neighbor -298 of the most recent point. -299 -300 Returns: -301 ======== -302 -303 d : array of floats -304 The distances to the nearest neighbors. -305 If x has shape tuple+(self.m,), then d has shape tuple if -306 k is one, or tuple+(k,) if k is larger than one. Missing -307 neighbors are indicated with infinite distances. If k is None, -308 then d is an object array of shape tuple, containing lists -309 of distances. In either case the hits are sorted by distance -310 (nearest first). -311 i : array of integers -312 The locations of the neighbors in self.data. i is the same -313 shape as d. -314 """ -315 x = np.asarray(x) -316 if np.shape(x)[-1] != self.m: -317 raise ValueError("x must consist of vectors of length %d but has shape %s" % (self.m, np.shape(x))) -318 if p<1: -319 raise ValueError("Only p-norms with 1<=p<=infinity permitted") -320 retshape = np.shape(x)[:-1] -321 if retshape!=(): -322 if k>1: -323 dd = np.empty(retshape+(k,),dtype=np.float) -324 dd.fill(np.inf) -325 ii = np.empty(retshape+(k,),dtype=np.int) -326 ii.fill(self.n) -327 elif k==1: -328 dd = np.empty(retshape,dtype=np.float) -329 dd.fill(np.inf) -330 ii = np.empty(retshape,dtype=np.int) -331 ii.fill(self.n) -332 elif k is None: -333 dd = np.empty(retshape,dtype=np.object) -334 ii = np.empty(retshape,dtype=np.object) -335 else: -336 raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None") -337 for c in np.ndindex(retshape): -338 hits = self.__query(x[c], k=k, p=p, distance_upper_bound=distance_upper_bound) -339 if k>1: -340 for j in range(len(hits)): -341 dd[c+(j,)], ii[c+(j,)] = hits[j] -342 elif k==1: -343 if len(hits)>0: -344 dd[c], ii[c] = hits[0] -345 else: -346 dd[c] = np.inf -347 ii[c] = self.n -348 elif k is None: -349 dd[c] = [d for (d,i) in hits] -350 ii[c] = [i for (d,i) in hits] -351 return dd, ii -352 else: -353 hits = self.__query(x, k=k, p=p, distance_upper_bound=distance_upper_bound) -354 if k==1: -355 if len(hits)>0: -356 return hits[0] -357 else: -358 return np.inf, self.n -359 elif k>1: -360 dd = np.empty(k,dtype=np.float) -361 dd.fill(np.inf) -362 ii = np.empty(k,dtype=np.int) -363 ii.fill(self.n) -364 for j in range(len(hits)): -365 dd[j], ii[j] = hits[j] -366 return dd, ii -367 elif k is None: -368 return [d for (d,i) in hits], [i for (d,i) in hits] -369 else: -370 raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None") -
371 -372 -
373 - def __query_ball_point(self, x, r, p=2., eps=0): -
374 R = Rectangle(self.maxes, self.mins) -375 -376 def traverse_checking(node, rect): -377 if rect.min_distance_point(x,p)>=r/(1.+eps): -378 return [] -379 elif rect.max_distance_point(x,p)<r*(1.+eps): -380 return traverse_no_checking(node) -381 elif isinstance(node, KDTree.leafnode): -382 d = self.data[node.idx] -383 return node.idx[minkowski_distance(d,x,p)<=r].tolist() -384 else: -385 less, greater = rect.split(node.split_dim, node.split) -386 return traverse_checking(node.less, less)+traverse_checking(node.greater, greater) -
387 def traverse_no_checking(node): -388 if isinstance(node, KDTree.leafnode): -389 -390 return node.idx.tolist() -391 else: -392 return traverse_no_checking(node.less)+traverse_no_checking(node.greater) -
393 -394 return traverse_checking(self.tree, R) -395 -
396 - def query_ball_point(self, x, r, p=2., eps=0): -
397 """Find all points within r of x -398 -399 Parameters -400 ========== -401 -402 x : array_like, shape tuple + (self.m,) -403 The point or points to search for neighbors of -404 r : positive float -405 The radius of points to return -406 p : float 1<=p<=infinity -407 Which Minkowski p-norm to use -408 eps : nonnegative float -409 Approximate search. Branches of the tree are not explored -410 if their nearest points are further than r/(1+eps), and branches -411 are added in bulk if their furthest points are nearer than r*(1+eps). -412 -413 Returns -414 ======= -415 -416 results : list or array of lists -417 If x is a single point, returns a list of the indices of the neighbors -418 of x. If x is an array of points, returns an object array of shape tuple -419 containing lists of neighbors. -420 -421 -422 Note: if you have many points whose neighbors you want to find, you may save -423 substantial amounts of time by putting them in a KDTree and using query_ball_tree. -424 """ -425 x = np.asarray(x) -426 if x.shape[-1]!=self.m: -427 raise ValueError("Searching for a %d-dimensional point in a %d-dimensional KDTree" % (x.shape[-1],self.m)) -428 if len(x.shape)==1: -429 return self.__query_ball_point(x,r,p,eps) -430 else: -431 retshape = x.shape[:-1] -432 result = np.empty(retshape,dtype=np.object) -433 for c in np.ndindex(retshape): -434 result[c] = self.__query_ball_point(x[c], r, p=p, eps=eps) -435 return result -
436 -
437 - def query_ball_tree(self, other, r, p=2., eps=0): -
438 """Find all pairs of points whose distance is at most r -439 -440 Parameters -441 ========== -442 -443 other : KDTree -444 The tree containing points to search against -445 r : positive float -446 The maximum distance -447 p : float 1<=p<=infinity -448 Which Minkowski norm to use -449 eps : nonnegative float -450 Approximate search. Branches of the tree are not explored -451 if their nearest points are further than r/(1+eps), and branches -452 are added in bulk if their furthest points are nearer than r*(1+eps). -453 -454 Returns -455 ======= -456 -457 results : list of lists -458 For each element self.data[i] of this tree, results[i] is a list of the -459 indices of its neighbors in other.data. -460 """ -461 results = [[] for i in range(self.n)] -462 def traverse_checking(node1, rect1, node2, rect2): -463 if rect1.min_distance_rectangle(rect2, p)>r/(1.+eps): -464 return -465 elif rect1.max_distance_rectangle(rect2, p)<r*(1.+eps): -466 traverse_no_checking(node1, node2) -467 elif isinstance(node1, KDTree.leafnode): -468 if isinstance(node2, KDTree.leafnode): -469 d = other.data[node2.idx] -470 for i in node1.idx: -471 results[i] += node2.idx[minkowski_distance(d,self.data[i],p)<=r].tolist() -472 else: -473 less, greater = rect2.split(node2.split_dim, node2.split) -474 traverse_checking(node1,rect1,node2.less,less) -475 traverse_checking(node1,rect1,node2.greater,greater) -476 elif isinstance(node2, KDTree.leafnode): -477 less, greater = rect1.split(node1.split_dim, node1.split) -478 traverse_checking(node1.less,less,node2,rect2) -479 traverse_checking(node1.greater,greater,node2,rect2) -480 else: -481 less1, greater1 = rect1.split(node1.split_dim, node1.split) -482 less2, greater2 = rect2.split(node2.split_dim, node2.split) -483 traverse_checking(node1.less,less1,node2.less,less2) -484 traverse_checking(node1.less,less1,node2.greater,greater2) -485 traverse_checking(node1.greater,greater1,node2.less,less2) -486 traverse_checking(node1.greater,greater1,node2.greater,greater2) -
487 -488 def traverse_no_checking(node1, node2): -489 if isinstance(node1, KDTree.leafnode): -490 if isinstance(node2, KDTree.leafnode): -491 for i in node1.idx: -492 results[i] += node2.idx.tolist() -493 else: -494 traverse_no_checking(node1, node2.less) -495 traverse_no_checking(node1, node2.greater) -496 else: -497 traverse_no_checking(node1.less, node2) -498 traverse_no_checking(node1.greater, node2) -499 -500 traverse_checking(self.tree, Rectangle(self.maxes, self.mins), -501 other.tree, Rectangle(other.maxes, other.mins)) -502 return results -503 -504 -
505 - def count_neighbors(self, other, r, p=2.): -
506 """Count how many nearby pairs can be formed. -507 -508 Count the number of pairs (x1,x2) can be formed, with x1 drawn -509 from self and x2 drawn from other, and where distance(x1,x2,p)<=r. -510 This is the "two-point correlation" described in Gray and Moore 2000, -511 "N-body problems in statistical learning", and the code here is based -512 on their algorithm. -513 -514 Parameters -515 ========== -516 -517 other : KDTree -518 -519 r : float or one-dimensional array of floats -520 The radius to produce a count for. Multiple radii are searched with a single -521 tree traversal. -522 p : float, 1<=p<=infinity -523 Which Minkowski p-norm to use -524 -525 Returns -526 ======= -527 -528 result : integer or one-dimensional array of integers -529 The number of pairs. Note that this is internally stored in a numpy int, -530 and so may overflow if very large (two billion). -531 """ -532 -533 def traverse(node1, rect1, node2, rect2, idx): -534 min_r = rect1.min_distance_rectangle(rect2,p) -535 max_r = rect1.max_distance_rectangle(rect2,p) -536 c_greater = r[idx]>max_r -537 result[idx[c_greater]] += node1.children*node2.children -538 idx = idx[(min_r<=r[idx]) & (r[idx]<=max_r)] -539 if len(idx)==0: -540 return -541 -542 if isinstance(node1,KDTree.leafnode): -543 if isinstance(node2,KDTree.leafnode): -544 ds = minkowski_distance(self.data[node1.idx][:,np.newaxis,:], -545 other.data[node2.idx][np.newaxis,:,:], -546 p).ravel() -547 ds.sort() -548 result[idx] += np.searchsorted(ds,r[idx],side='right') -549 else: -550 less, greater = rect2.split(node2.split_dim, node2.split) -551 traverse(node1, rect1, node2.less, less, idx) -552 traverse(node1, rect1, node2.greater, greater, idx) -553 else: -554 if isinstance(node2,KDTree.leafnode): -555 less, greater = rect1.split(node1.split_dim, node1.split) -556 traverse(node1.less, less, node2, rect2, idx) -557 traverse(node1.greater, greater, node2, rect2, idx) -558 else: -559 less1, greater1 = rect1.split(node1.split_dim, node1.split) -560 less2, greater2 = rect2.split(node2.split_dim, node2.split) -561 traverse(node1.less,less1,node2.less,less2,idx) -562 traverse(node1.less,less1,node2.greater,greater2,idx) -563 traverse(node1.greater,greater1,node2.less,less2,idx) -564 traverse(node1.greater,greater1,node2.greater,greater2,idx) -
565 R1 = Rectangle(self.maxes, self.mins) -566 R2 = Rectangle(other.maxes, other.mins) -567 if np.shape(r) == (): -568 r = np.array([r]) -569 result = np.zeros(1,dtype=int) -570 traverse(self.tree, R1, other.tree, R2, np.arange(1)) -571 return result[0] -572 elif len(np.shape(r))==1: -573 r = np.asarray(r) -574 n, = r.shape -575 result = np.zeros(n,dtype=int) -576 traverse(self.tree, R1, other.tree, R2, np.arange(n)) -577 return result -578 else: -579 raise ValueError("r must be either a single value or a one-dimensional array of values") -580 -581 ############################################################### -582 # Commented this routine out because of scipy.sparse dependence -583 # -rdh -584 ############################################################### -585 # def sparse_distance_matrix(self, other, max_distance, p=2.): -586 # """Compute a sparse distance matrix -587 # -588 # Computes a distance matrix between two KDTrees, leaving as zero -589 # any distance greater than max_distance. -590 # -591 # Parameters -592 # ========== -593 # -594 # other : KDTree -595 # -596 # max_distance : positive float -597 # -598 # Returns -599 # ======= -600 # -601 # result : dok_matrix -602 # Sparse matrix representing the results in "dictionary of keys" format. -603 # """ -604 # result = scipy.sparse.dok_matrix((self.n,other.n)) -605 # -606 # def traverse(node1, rect1, node2, rect2): -607 # if rect1.min_distance_rectangle(rect2, p)>max_distance: -608 # return -609 # elif isinstance(node1, KDTree.leafnode): -610 # if isinstance(node2, KDTree.leafnode): -611 # for i in node1.idx: -612 # for j in node2.idx: -613 # d = minkowski_distance(self.data[i],other.data[j],p) -614 # if d<=max_distance: -615 # result[i,j] = d -616 # else: -617 # less, greater = rect2.split(node2.split_dim, node2.split) -618 # traverse(node1,rect1,node2.less,less) -619 # traverse(node1,rect1,node2.greater,greater) -620 # elif isinstance(node2, KDTree.leafnode): -621 # less, greater = rect1.split(node1.split_dim, node1.split) -622 # traverse(node1.less,less,node2,rect2) -623 # traverse(node1.greater,greater,node2,rect2) -624 # else: -625 # less1, greater1 = rect1.split(node1.split_dim, node1.split) -626 # less2, greater2 = rect2.split(node2.split_dim, node2.split) -627 # traverse(node1.less,less1,node2.less,less2) -628 # traverse(node1.less,less1,node2.greater,greater2) -629 # traverse(node1.greater,greater1,node2.less,less2) -630 # traverse(node1.greater,greater1,node2.greater,greater2) -631 # traverse(self.tree, Rectangle(self.maxes, self.mins), -632 # other.tree, Rectangle(other.maxes, other.mins)) -633 # -634 # return result -635 -636 -
637 -def distance_matrix(x,y,p=2,threshold=1000000): -
638 """Compute the distance matrix. -639 -640 Computes the matrix of all pairwise distances. -641 -642 Parameters -643 ========== -644 -645 x : array-like, m by k -646 y : array-like, n by k -647 p : float 1<=p<=infinity -648 Which Minkowski p-norm to use. -649 threshold : positive integer -650 If m*n*k>threshold use a python loop instead of creating -651 a very large temporary. -652 -653 Returns -654 ======= -655 -656 result : array-like, m by n -657 -658 -659 """ -660 -661 x = np.asarray(x) -662 m, k = x.shape -663 y = np.asarray(y) -664 n, kk = y.shape -665 -666 if k != kk: -667 raise ValueError("x contains %d-dimensional vectors but y contains %d-dimensional vectors" % (k, kk)) -668 -669 if m*n*k <= threshold: -670 return minkowski_distance(x[:,np.newaxis,:],y[np.newaxis,:,:],p) -671 else: -672 result = np.empty((m,n),dtype=np.float) #FIXME: figure out the best dtype -673 if m<n: -674 for i in range(m): -675 result[i,:] = minkowski_distance(x[i],y,p) -676 else: -677 for j in range(n): -678 result[:,j] = minkowski_distance(x,y[j],p) -679 return result -
680 -
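The distance_matrix function above computes all pairwise Minkowski distances between two point sets, falling back to a row-by-row loop once m*n*k exceeds the threshold. A minimal calling sketch with made-up toy arrays; scipy.spatial.distance_matrix exposes the same signature and is used here so the snippet is self-contained:

    import numpy as np
    from scipy.spatial import distance_matrix  # same signature as the function above

    x = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 2.0]])   # 3 points, k=2
    y = np.array([[1.0, 1.0], [3.0, 4.0]])                # 2 points, k=2

    d2 = distance_matrix(x, y)          # Euclidean (p=2), shape (3, 2)
    d1 = distance_matrix(x, y, p=1)     # Manhattan distances

    # A tiny threshold forces the low-memory branch that loops over rows
    # instead of building the full (m, n, k) temporary.
    assert np.allclose(d2, distance_matrix(x, y, threshold=1))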
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.kdtree.KDTree-class.html b/pyroms/docs/pyroms.extern.kdtree.KDTree-class.html deleted file mode 100644 index 3042cc3..0000000 --- a/pyroms/docs/pyroms.extern.kdtree.KDTree-class.html +++ /dev/null @@ -1,665 +0,0 @@ - - - - - pyroms.extern.kdtree.KDTree - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module kdtree :: - Class KDTree - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class KDTree

source code

-
-object --+
-         |
-        KDTree
-
- -
-

kd-tree for quick nearest-neighbor lookup

-

This class provides an index into a set of k-dimensional points which - can be used to rapidly look up the nearest neighbors of any point.

-

The algorithm used is described in Maneewongvatana and Mount 1999. The - general idea is that the kd-tree is a binary trie, each of whose nodes - represents an axis-aligned hyperrectangle. Each node specifies an axis - and splits the set of points based on whether their coordinate along that - axis is greater than or less than a particular value.

-

During construction, the axis and splitting point are chosen by the - "sliding midpoint" rule, which ensures that the cells do not - all become long and thin.

-

The tree can be queried for the r closest neighbors of any given point - (optionally returning only those within some maximum distance of the - point). It can also be queried, with a substantial gain in efficiency, - for the r approximate closest neighbors.

-

For large dimensions (20 is already large) do not expect this to run - significantly faster than brute force. High-dimensional nearest-neighbor - queries are a substantial open problem in computer science.

-

The tree also supports all-neighbors queries, both with arrays of - points and with other kd-trees. These do use a reasonably efficient - algorithm, but the kd-tree is not necessarily the best data structure for - this sort of calculation.

- - - - - - - - - - - - - - - - -
- - - - - -
Nested Classes[hide private]
-
-   - - node -
-   - - leafnode -
-   - - innernode -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - data, - leafsize=10)
- Construct a kd-tree.
- source code - -
- -
-   - - - - - - -
__build(self, - idx, - maxes, - mins) - source code - -
- -
-   - - - - - - -
__query(self, - x, - k=1, - eps=0, - p=2, - distance_upper_bound=inf) - source code - -
- -
-   - - - - - - -
query(self, - x, - k=1, - eps=0, - p=2, - distance_upper_bound=inf)
- query the kd-tree for nearest neighbors
- source code - -
- -
-   - - - - - - -
__query_ball_point(self, - x, - r, - p=2.0, - eps=0) - source code - -
- -
-   - - - - - - -
query_ball_point(self, - x, - r, - p=2.0, - eps=0)
- Find all points within r of x
- source code - -
- -
-   - - - - - - -
query_ball_tree(self, - other, - r, - p=2.0, - eps=0)
- Find all pairs of points whose distance is at most r
- source code - -
- -
-   - - - - - - -
count_neighbors(self, - other, - r, - p=2.0)
- Count how many nearby pairs can be formed.
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - data, - leafsize=10) -
(Constructor) -

-
source code  -
- -
-Construct a kd-tree.
-
-Parameters:
-===========
-
-data : array-like, shape (n,k)
-    The data points to be indexed. This array is not copied, and
-    so modifying this data will result in bogus results.
-leafsize : positive integer
-    The number of points at which the algorithm switches over to
-    brute-force.
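A minimal construction sketch with made-up data. It assumes the vendored module is importable as pyroms.extern.kdtree (the package path shown on this page); scipy.spatial.KDTree takes the same constructor arguments and is used here so the example runs on its own:

    import numpy as np
    from scipy.spatial import KDTree   # same constructor signature as the class documented here

    rng = np.random.default_rng(0)
    data = rng.random((1000, 3))       # 1000 points in 3 dimensions

    # leafsize sets the bucket size at which the recursion switches to brute
    # force: larger leaves give a shallower, cheaper-to-build tree; smaller
    # leaves give more pruning during queries.
    tree = KDTree(data, leafsize=10)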
-
-
-
-
Overrides: - object.__init__ -
-
-
-
- -
- -
- - -
-

query(self, - x, - k=1, - eps=0, - p=2, - distance_upper_bound=inf) -

-
source code  -
- -
-query the kd-tree for nearest neighbors
-
-Parameters:
-===========
-
-x : array-like, last dimension self.m
-    An array of points to query.
-k : integer
-    The number of nearest neighbors to return.
-eps : nonnegative float
-    Return approximate nearest neighbors; the kth returned value
-    is guaranteed to be no further than (1+eps) times the
-    distance to the real kth nearest neighbor.
-p : float, 1<=p<=infinity
-    Which Minkowski p-norm to use.
-    1 is the sum-of-absolute-values "Manhattan" distance
-    2 is the usual Euclidean distance
-    infinity is the maximum-coordinate-difference distance
-distance_upper_bound : nonnegative float
-    Return only neighbors within this distance. This is used to prune
-    tree searches, so if you are doing a series of nearest-neighbor
-    queries, it may help to supply the distance to the nearest neighbor
-    of the most recent point.
-
-Returns:
-========
-
-d : array of floats
-    The distances to the nearest neighbors.
-    If x has shape tuple+(self.m,), then d has shape tuple if
-    k is one, or tuple+(k,) if k is larger than one.  Missing
-    neighbors are indicated with infinite distances.  If k is None,
-    then d is an object array of shape tuple, containing lists
-    of distances. In either case the hits are sorted by distance
-    (nearest first).
-i : array of integers
-    The locations of the neighbors in self.data. i is the same
-    shape as d.
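A sketch of the query call with made-up data; scipy.spatial.KDTree accepts the same keywords as the method documented here:

    import numpy as np
    from scipy.spatial import KDTree

    rng = np.random.default_rng(1)
    tree = KDTree(rng.random((500, 2)))
    pts = rng.random((10, 2))                       # ten query points

    d, i = tree.query(pts, k=3)                     # d and i have shape (10, 3)
    d1, i1 = tree.query(pts, k=1)                   # shapes collapse to (10,)
    da, ia = tree.query(pts, k=3, eps=0.1)          # approximate: within (1+eps) of the true kth distance
    db, ib = tree.query(pts, k=3, distance_upper_bound=0.05)
    # Neighbors beyond the upper bound are reported with d == inf and i == tree.n.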
-
-
-
-
-
-
- -
- -
- - -
-

query_ball_point(self, - x, - r, - p=2.0, - eps=0) -

-
source code  -
- -
-Find all points within r of x
-
-Parameters
-==========
-
-x : array_like, shape tuple + (self.m,)
-    The point or points to search for neighbors of
-r : positive float
-    The radius of points to return
-p : float 1<=p<=infinity
-    Which Minkowski p-norm to use
-eps : nonnegative float
-    Approximate search. Branches of the tree are not explored
-    if their nearest points are further than r/(1+eps), and branches
-    are added in bulk if their furthest points are nearer than r*(1+eps).
-
-Returns
-=======
-
-results : list or array of lists
-    If x is a single point, returns a list of the indices of the neighbors
-    of x. If x is an array of points, returns an object array of shape tuple
-    containing lists of neighbors.
-
-
-Note: if you have many points whose neighbors you want to find, you may save
-substantial amounts of time by putting them in a KDTree and using query_ball_tree.
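A short sketch of both calling styles, using a made-up integer lattice (scipy.spatial.KDTree has the same interface):

    import numpy as np
    from scipy.spatial import KDTree

    grid = np.mgrid[0:5, 0:5].reshape(2, -1).T     # 25 points on an integer lattice
    tree = KDTree(grid)

    near_origin = tree.query_ball_point([0, 0], r=1.5)        # a plain list of indices
    batches = tree.query_ball_point([[0, 0], [4, 4]], r=1.5)  # one list of indices per query point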
-
-
-
-
-
-
- -
- -
- - -
-

query_ball_tree(self, - other, - r, - p=2.0, - eps=0) -

-
source code  -
- -
-Find all pairs of points whose distance is at most r
-
-Parameters
-==========
-
-other : KDTree
-    The tree containing points to search against
-r : positive float
-    The maximum distance
-p : float 1<=p<=infinity
-    Which Minkowski norm to use
-eps : nonnegative float
-    Approximate search. Branches of the tree are not explored
-    if their nearest points are further than r/(1+eps), and branches
-    are added in bulk if their furthest points are nearer than r*(1+eps).
-
-Returns
-=======
-
-results : list of lists
-    For each element self.data[i] of this tree, results[i] is a list of the
-    indices of its neighbors in other.data.
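A sketch with two small trees of made-up points (scipy.spatial.KDTree provides the same method):

    import numpy as np
    from scipy.spatial import KDTree

    rng = np.random.default_rng(2)
    a = KDTree(rng.random((100, 2)))
    b = KDTree(rng.random((80, 2)))

    pairs = a.query_ball_tree(b, r=0.1)
    # pairs[i] lists the indices j with distance(a.data[i], b.data[j]) <= 0.1
    n_links = sum(len(p) for p in pairs)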
-
-
-
-
-
-
- -
- -
- - -
-

count_neighbors(self, - other, - r, - p=2.0) -

-
source code  -
- -
-Count how many nearby pairs can be formed.
-
-Count the number of pairs (x1,x2) that can be formed, with x1 drawn
-from self and x2 drawn from other, and where distance(x1,x2,p)<=r.
-This is the "two-point correlation" described in Gray and Moore 2000,
-"N-body problems in statistical learning", and the code here is based
-on their algorithm.
-
-Parameters
-==========
-
-other : KDTree
-
-r : float or one-dimensional array of floats
-    The radius to produce a count for. Multiple radii are searched with a single
-    tree traversal.
-p : float, 1<=p<=infinity
-    Which Minkowski p-norm to use
-
-Returns
-=======
-
-result : integer or one-dimensional array of integers
-    The number of pairs. Note that this is internally stored in a numpy int,
-    and so may overflow if very large (two billion).
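A sketch of the two-point-correlation style call with a vector of radii, using made-up data (scipy.spatial.KDTree offers the same method):

    import numpy as np
    from scipy.spatial import KDTree

    rng = np.random.default_rng(3)
    a = KDTree(rng.random((2000, 3)))
    b = KDTree(rng.random((2000, 3)))

    radii = np.linspace(0.05, 0.5, 10)
    counts = a.count_neighbors(b, radii)   # a single tree traversal covers all ten radii
    # counts[k] is the number of (x1, x2) pairs with distance <= radii[k].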
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.kdtree.KDTree.innernode-class.html b/pyroms/docs/pyroms.extern.kdtree.KDTree.innernode-class.html deleted file mode 100644 index b0d40b4..0000000 --- a/pyroms/docs/pyroms.extern.kdtree.KDTree.innernode-class.html +++ /dev/null @@ -1,243 +0,0 @@ - - - - - pyroms.extern.kdtree.KDTree.innernode - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module kdtree :: - Class KDTree :: - Class innernode - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class innernode

source code

-
- object --+    
-          |    
-KDTree.node --+
-              |
-             KDTree.innernode
-
- -
- - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - split_dim, - split, - less, - greater)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - split_dim, - split, - less, - greater) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.kdtree.KDTree.leafnode-class.html b/pyroms/docs/pyroms.extern.kdtree.KDTree.leafnode-class.html deleted file mode 100644 index b9ababa..0000000 --- a/pyroms/docs/pyroms.extern.kdtree.KDTree.leafnode-class.html +++ /dev/null @@ -1,237 +0,0 @@ - - - - - pyroms.extern.kdtree.KDTree.leafnode - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module kdtree :: - Class KDTree :: - Class leafnode - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class leafnode

source code

-
- object --+    
-          |    
-KDTree.node --+
-              |
-             KDTree.leafnode
-
- -
- - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - idx)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - idx) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.kdtree.KDTree.node-class.html b/pyroms/docs/pyroms.extern.kdtree.KDTree.node-class.html deleted file mode 100644 index 91b45ba..0000000 --- a/pyroms/docs/pyroms.extern.kdtree.KDTree.node-class.html +++ /dev/null @@ -1,179 +0,0 @@ - - - - - pyroms.extern.kdtree.KDTree.node - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module kdtree :: - Class KDTree :: - Class node - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class node

source code

-
-object --+
-         |
-        KDTree.node
-
- -
Known Subclasses:
-
- -
- -
- - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __init__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.kdtree.Rectangle-class.html b/pyroms/docs/pyroms.extern.kdtree.Rectangle-class.html deleted file mode 100644 index 06be2a3..0000000 --- a/pyroms/docs/pyroms.extern.kdtree.Rectangle-class.html +++ /dev/null @@ -1,417 +0,0 @@ - - - - - pyroms.extern.kdtree.Rectangle - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module kdtree :: - Class Rectangle - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class Rectangle

source code

-
-object --+
-         |
-        Rectangle
-
- -
-

Hyperrectangle class.

-

Represents a Cartesian product of intervals.

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - maxes, - mins)
- Construct a hyperrectangle.
- source code - -
- -
-   - - - - - - -
__repr__(self)
- repr(x)
- source code - -
- -
-   - - - - - - -
volume(self)
- Total volume.
- source code - -
- -
-   - - - - - - -
split(self, - d, - split)
- Produce two hyperrectangles by splitting along axis d.
- source code - -
- -
-   - - - - - - -
min_distance_point(self, - x, - p=2.0)
- Compute the minimum distance between x and a point in the - hyperrectangle.
- source code - -
- -
-   - - - - - - -
max_distance_point(self, - x, - p=2.0)
- Compute the maximum distance between x and a point in the - hyperrectangle.
- source code - -
- -
-   - - - - - - -
min_distance_rectangle(self, - other, - p=2.0)
- Compute the minimum distance between points in the two - hyperrectangles.
- source code - -
- -
-   - - - - - - -
max_distance_rectangle(self, - other, - p=2.0)
- Compute the maximum distance between points in the two - hyperrectangles.
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - maxes, - mins) -
(Constructor) -

-
source code  -
- -

Construct a hyperrectangle.

-
-
Overrides: - object.__init__ -
-
-
-
- -
- -
- - -
-

__repr__(self) -
(Representation operator) -

-
source code  -
- -

repr(x)

-
-
Overrides: - object.__repr__ -
(inherited documentation)
- -
-
-
- -
- -
- - -
-

split(self, - d, - split) -

-
source code  -
- -

Produce two hyperrectangles by splitting along axis d.

-

In general, if you need to compute maximum and minimum distances to - the children, it can be done more efficiently by updating the maximum and - minimum distances to the parent.

-
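A small sketch of the splitting behaviour, using made-up bounds; scipy.spatial.Rectangle exposes the same interface as the class documented here:

    from scipy.spatial import Rectangle

    rect = Rectangle(maxes=[1.0, 1.0], mins=[0.0, 0.0])   # the unit square
    less, greater = rect.split(0, 0.25)                   # cut axis 0 at 0.25

    print(less.volume(), greater.volume())         # 0.25 and 0.75
    print(less.min_distance_rectangle(greater))    # 0.0, the two halves touch
    print(rect.min_distance_point([2.0, 0.5]))     # 1.0, the point lies one unit outside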
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.pupynere-module.html b/pyroms/docs/pyroms.extern.pupynere-module.html deleted file mode 100644 index c7239d7..0000000 --- a/pyroms/docs/pyroms.extern.pupynere-module.html +++ /dev/null @@ -1,275 +0,0 @@ - - - - - pyroms.extern.pupynere - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module pupynere - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Module pupynere

source code

-

NetCDF reader.

-

Pupynere implements a PUre PYthon NEtcdf REader.

- -
-

Author: - Roberto De Almeida <rob@pydap.org> -

-
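A minimal read sketch based on the interface shown in the source listing further below. The file name and variable name are hypothetical, and two caveats apply: pupynere only understands the classic (CDF-1/CDF-2) NetCDF format, and the vendored code still uses Python-2 byte/string handling, so under Python 3 netCDF4.Dataset is the usual replacement:

    from pyroms.extern.pupynere import NetCDFFile   # the vendored module documented here

    nc = NetCDFFile('ocean_grd.nc')      # hypothetical classic-format NetCDF file
    print(nc.dimensions)                 # dict: dimension name -> length (None for the record dimension)
    h = nc.variables['h']                # hypothetical variable name; a NetCDFVariable
    print(h.dimensions, h.shape, h.typecode())
    bathy = h[:]                         # data exposed through __getitem__ (mmap-backed)
    nc.close()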
- - - - - - - - - - - -
- - - - - -
Classes[hide private]
-
-   - - NetCDFFile
- A NetCDF file parser. -
-   - - NetCDFVariable -
- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
_test() - source code - -
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Variables[hide private]
-
-   - - ABSENT = '\x00\x00\x00\x00\x00\x00\x00\x00' -
-   - - ZERO = '\x00\x00\x00\x00' -
-   - - NC_BYTE = '\x00\x00\x00\x01' -
-   - - NC_CHAR = '\x00\x00\x00\x02' -
-   - - NC_SHORT = '\x00\x00\x00\x03' -
-   - - NC_INT = '\x00\x00\x00\x04' -
-   - - NC_FLOAT = '\x00\x00\x00\x05' -
-   - - NC_DOUBLE = '\x00\x00\x00\x06' -
-   - - NC_DIMENSION = '\x00\x00\x00\n' -
-   - - NC_VARIABLE = '\x00\x00\x00\x0b' -
-   - - NC_ATTRIBUTE = '\x00\x00\x00\x0c' -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.pupynere-pysrc.html b/pyroms/docs/pyroms.extern.pupynere-pysrc.html deleted file mode 100644 index 8d08cb5..0000000 --- a/pyroms/docs/pyroms.extern.pupynere-pysrc.html +++ /dev/null @@ -1,365 +0,0 @@ - - - - - pyroms.extern.pupynere - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module pupynere - - - - - - -
[hide private]
[frames] | no frames]
-
-

Source Code for Module pyroms.extern.pupynere

-
-  1  """NetCDF reader. 
-  2   
-  3  Pupynere implements a PUre PYthon NEtcdf REader. 
-  4  """ 
-  5   
-  6  __author__ = "Roberto De Almeida <rob@pydap.org>" 
-  7   
-  8   
-  9  import struct 
- 10  import itertools 
- 11  import mmap 
- 12   
- 13  from numpy import ndarray, zeros, array 
- 14   
- 15   
- 16  ABSENT       = '\x00' * 8 
- 17  ZERO         = '\x00' * 4 
- 18  NC_BYTE      = '\x00\x00\x00\x01'  
- 19  NC_CHAR      = '\x00\x00\x00\x02' 
- 20  NC_SHORT     = '\x00\x00\x00\x03' 
- 21  NC_INT       = '\x00\x00\x00\x04' 
- 22  NC_FLOAT     = '\x00\x00\x00\x05' 
- 23  NC_DOUBLE    = '\x00\x00\x00\x06' 
- 24  NC_DIMENSION = '\x00\x00\x00\n' 
- 25  NC_VARIABLE  = '\x00\x00\x00\x0b' 
- 26  NC_ATTRIBUTE = '\x00\x00\x00\x0c' 
- 27   
- 28   
-
29 -class NetCDFFile(object): -
30 """A NetCDF file parser.""" - 31 -
32 - def __init__(self, file): -
33 self._buffer = open(file, 'rb') - 34 self._parse() -
35 -
36 - def read(self, size=-1): -
37 """Alias for reading the file buffer.""" - 38 return self._buffer.read(size) -
39 -
40 - def _parse(self): -
41 """Initial parsing of the header.""" - 42 # Check magic bytes. - 43 assert self.read(3) == 'CDF' - 44 - 45 # Read version byte. - 46 byte = self.read(1) - 47 self.version_byte = struct.unpack('>b', byte)[0] - 48 - 49 # Read header info. - 50 self._numrecs() - 51 self._dim_array() - 52 self._gatt_array() - 53 self._var_array() -
54 -
55 - def _numrecs(self): -
56 """Read number of records.""" - 57 self._nrecs = self._unpack_int() -
58 -
59 - def _dim_array(self): -
60 """Read a dict with dimensions names and sizes.""" - 61 assert self.read(4) in [ZERO, NC_DIMENSION] - 62 count = self._unpack_int() - 63 - 64 self.dimensions = {} - 65 self._dims = [] - 66 for dim in range(count): - 67 name = self._read_string() - 68 length = self._unpack_int() - 69 if length == 0: length = None # record dimension - 70 self.dimensions[name] = length - 71 self._dims.append(name) # preserve dim order -
72 -
73 - def _gatt_array(self): -
74 """Read global attributes.""" - 75 self.attributes = self._att_array() - 76 - 77 # Update __dict__ for compatibility with S.IO.N - 78 self.__dict__.update(self.attributes) -
79 -
80 - def _att_array(self): -
81 """Read a dict with attributes.""" - 82 assert self.read(4) in [ZERO, NC_ATTRIBUTE] - 83 count = self._unpack_int() - 84 - 85 # Read attributes. - 86 attributes = {} - 87 for attribute in range(count): - 88 name = self._read_string() - 89 nc_type = self._unpack_int() - 90 n = self._unpack_int() - 91 - 92 # Read value for attributes. - 93 attributes[name] = self._read_values(n, nc_type) - 94 - 95 return attributes -
96 -
97 - def _var_array(self): -
98 """Read all variables.""" - 99 assert self.read(4) in [ZERO, NC_VARIABLE] -100 -101 # Read size of each record, in bytes. -102 self._read_recsize() -103 -104 # Read variables. -105 self.variables = {} -106 count = self._unpack_int() -107 for variable in range(count): -108 name = self._read_string() -109 self.variables[name] = self._read_var() -
110 -
111 - def _read_recsize(self): -
112 """Read all variables and compute record bytes.""" -113 pos = self._buffer.tell() -114 -115 recsize = 0 -116 count = self._unpack_int() -117 for variable in range(count): -118 name = self._read_string() -119 n = self._unpack_int() -120 isrec = False -121 for i in range(n): -122 dimid = self._unpack_int() -123 name = self._dims[dimid] -124 dim = self.dimensions[name] -125 if dim is None and i == 0: -126 isrec = True -127 attributes = self._att_array() -128 nc_type = self._unpack_int() -129 vsize = self._unpack_int() -130 begin = [self._unpack_int, self._unpack_int64][self.version_byte-1]() -131 -132 if isrec: recsize += vsize -133 -134 self._recsize = recsize -135 self._buffer.seek(pos) -
136 -
137 - def _read_var(self): -
138 dimensions = [] -139 shape = [] -140 n = self._unpack_int() -141 isrec = False -142 for i in range(n): -143 dimid = self._unpack_int() -144 name = self._dims[dimid] -145 dimensions.append(name) -146 dim = self.dimensions[name] -147 if dim is None and i == 0: -148 dim = self._nrecs -149 isrec = True -150 shape.append(dim) -151 dimensions = tuple(dimensions) -152 shape = tuple(shape) -153 -154 attributes = self._att_array() -155 nc_type = self._unpack_int() -156 vsize = self._unpack_int() -157 -158 # Read offset. -159 begin = [self._unpack_int, self._unpack_int64][self.version_byte-1]() -160 -161 return NetCDFVariable(self._buffer.fileno(), nc_type, vsize, begin, shape, dimensions, attributes, isrec, self._recsize) -
162 -
163 - def _read_values(self, n, nc_type): -
164 bytes = [1, 1, 2, 4, 4, 8] -165 typecodes = ['b', 'c', 'h', 'i', 'f', 'd'] -166 -167 count = n * bytes[nc_type-1] -168 values = self.read(count) -169 padding = self.read((4 - (count % 4)) % 4) -170 -171 typecode = typecodes[nc_type-1] -172 if nc_type != 2: # not char -173 values = struct.unpack('>%s' % (typecode * n), values) -174 values = array(values, dtype=typecode) -175 else: -176 # Remove EOL terminator. -177 if values.endswith('\x00'): values = values[:-1] -178 -179 return values -
180 -
181 - def _unpack_int(self): -
182 return struct.unpack('>i', self.read(4))[0] -
183 _unpack_int32 = _unpack_int -184 -
185 - def _unpack_int64(self): -
186 return struct.unpack('>q', self.read(8))[0] -
187 -
188 - def _read_string(self): -
189 count = struct.unpack('>i', self.read(4))[0] -190 s = self.read(count) -191 # Remove EOL terminator. -192 if s.endswith('\x00'): s = s[:-1] -193 padding = self.read((4 - (count % 4)) % 4) -194 return s -
195 -
196 - def close(self): -
197 self._buffer.close() -
198 -199 -
200 -class NetCDFVariable(object): -
201 - def __init__(self, fileno, nc_type, vsize, begin, shape, dimensions, attributes, isrec=False, recsize=0): -
202 self._nc_type = nc_type -203 self._vsize = vsize -204 self._begin = begin -205 self.shape = shape -206 self.dimensions = dimensions -207 self.attributes = attributes # for ``dap.plugins.netcdf`` -208 self.__dict__.update(attributes) -209 self._is_record = isrec -210 -211 # Number of bytes and type. -212 self._bytes = [1, 1, 2, 4, 4, 8][self._nc_type-1] -213 type_ = ['i', 'S', 'i', 'i', 'f', 'f'][self._nc_type-1] -214 dtype = '>%s%d' % (type_, self._bytes) -215 bytes = self._begin + self._vsize -216 -217 if isrec: -218 # Record variables are not stored contiguosly on disk, so we -219 # need to create a separate array for each record. -220 self.__array_data__ = zeros(shape, dtype) -221 bytes += (shape[0] - 1) * recsize -222 for n in range(shape[0]): -223 offset = self._begin + (n * recsize) -224 mm = mmap.mmap(fileno, bytes, access=mmap.ACCESS_READ) -225 self.__array_data__[n] = ndarray.__new__(ndarray, shape[1:], dtype=dtype, buffer=mm, offset=offset, order=0) -226 else: -227 # Create buffer and data. -228 mm = mmap.mmap(fileno, bytes, access=mmap.ACCESS_READ) -229 self.__array_data__ = ndarray.__new__(ndarray, shape, dtype=dtype, buffer=mm, offset=self._begin, order=0) -230 -231 # N-D array interface -232 self.__array_interface__ = {'shape' : shape, -233 'typestr': dtype, -234 'data' : self.__array_data__, -235 'version': 3, -236 } -
237 -
238 - def __getitem__(self, index): -
239 return self.__array_data__.__getitem__(index) -
240 -
241 - def getValue(self): -
242 """For scalars.""" -243 return self.__array_data__.item() -
244 -
245 - def typecode(self): -
246 return ['b', 'c', 'h', 'i', 'f', 'd'][self._nc_type-1] -
247 -248 -
249 -def _test(): -
250 import doctest -251 doctest.testmod() -
252 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.pupynere.NetCDFFile-class.html b/pyroms/docs/pyroms.extern.pupynere.NetCDFFile-class.html deleted file mode 100644 index 76a14d0..0000000 --- a/pyroms/docs/pyroms.extern.pupynere.NetCDFFile-class.html +++ /dev/null @@ -1,487 +0,0 @@ - - - - - pyroms.extern.pupynere.NetCDFFile - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module pupynere :: - Class NetCDFFile - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class NetCDFFile

source code

-
-object --+
-         |
-        NetCDFFile
-
- -
-

A NetCDF file parser.

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - file)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
read(self, - size=-1)
- Alias for reading the file buffer.
- source code - -
- -
-   - - - - - - -
_parse(self)
- Initial parsing of the header.
- source code - -
- -
-   - - - - - - -
_numrecs(self)
- Read number of records.
- source code - -
- -
-   - - - - - - -
_dim_array(self)
- Read a dict with dimensions names and sizes.
- source code - -
- -
-   - - - - - - -
_gatt_array(self)
- Read global attributes.
- source code - -
- -
-   - - - - - - -
_att_array(self)
- Read a dict with attributes.
- source code - -
- -
-   - - - - - - -
_var_array(self)
- Read all variables.
- source code - -
- -
-   - - - - - - -
_read_recsize(self)
- Read all variables and compute record bytes.
- source code - -
- -
-   - - - - - - -
_read_var(self) - source code - -
- -
-   - - - - - - -
_read_values(self, - n, - nc_type) - source code - -
- -
-   - - - - - - -
_unpack_int(self) - source code - -
- -
-   - - - - - - -
_unpack_int32(self) - source code - -
- -
-   - - - - - - -
_unpack_int64(self) - source code - -
- -
-   - - - - - - -
_read_string(self) - source code - -
- -
-   - - - - - - -
close(self) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - file) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.extern.pupynere.NetCDFVariable-class.html b/pyroms/docs/pyroms.extern.pupynere.NetCDFVariable-class.html deleted file mode 100644 index c558a8f..0000000 --- a/pyroms/docs/pyroms.extern.pupynere.NetCDFVariable-class.html +++ /dev/null @@ -1,300 +0,0 @@ - - - - - pyroms.extern.pupynere.NetCDFVariable - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Package extern :: - Module pupynere :: - Class NetCDFVariable - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class NetCDFVariable

source code

-
-object --+
-         |
-        NetCDFVariable
-
- -
- - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - fileno, - nc_type, - vsize, - begin, - shape, - dimensions, - attributes, - isrec=False, - recsize=0)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
__getitem__(self, - index) - source code - -
- -
-   - - - - - - -
getValue(self)
- For scalars.
- source code - -
- -
-   - - - - - - -
typecode(self) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - fileno, - nc_type, - vsize, - begin, - shape, - dimensions, - attributes, - isrec=False, - recsize=0) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.grid-module.html b/pyroms/docs/pyroms.grid-module.html deleted file mode 100644 index 9aa9774..0000000 --- a/pyroms/docs/pyroms.grid-module.html +++ /dev/null @@ -1,404 +0,0 @@ - - - - - pyroms.grid - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module grid - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Module grid

source code

- - - - - - - - - - - - -
- - - - - -
Classes[hide private]
-
-   - - ROMS_Grid
- grd = ROMS_Grid(hgrid, vgrid) -
-   - - ROMS_gridinfo
- gridinfo = ROMS_gridinfo(gridid,grid_file=None,hist_file=None) -
- - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
print_ROMS_gridinfo(gridid)
- return the grid information for gridid
- source code - -
- -
-   - - - - - - -
list_ROMS_gridid()
- return the list of the defined gridid
- source code - -
- -
-   - - - - - - -
get_ROMS_hgrid(gridid)
- hgrid = get_ROMS_hgrid(gridid)
- source code - -
- -
-   - - - - - - -
get_ROMS_vgrid(gridid, - zeta=None)
- vgrid = get_ROMS_vgrid(gridid)
- source code - -
- -
-   - - - - - - -
get_ROMS_grid(gridid, - zeta=None, - hist_file=None, - grid_file=None)
- grd = get_ROMS_grid(gridid,hist_file=None,grid_file=None)
- source code - -
- -
-   - - - - - - -
write_ROMS_grid(grd, - filename='ocean_grd.nc')
- Write ROMS_CGrid class on a NetCDF file.
- source code - -
- -
- - - - - - - - - -
- - - - - -
Variables[hide private]
-
-   - - gridid_dictionary = {} -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

get_ROMS_hgrid(gridid) -

-
source code  -
- -

hgrid = get_ROMS_hgrid(gridid)

-

Load ROMS horizontal grid object

-
-
-
-
- -
- -
- - -
-

get_ROMS_vgrid(gridid, - zeta=None) -

-
source code  -
- -

vgrid = get_ROMS_vgrid(gridid)

-

Load ROMS vertical grid object. vgrid is a s_coordinate or a - z_coordinate object, depending on gridid.grdtype. vgrid.z_r and vgrid.z_w - (vgrid.z for a z_coordinate object) can be indexed in order to retreive - the actual depths. The free surface time serie zeta can be provided as an - optional argument. Note that the values of zeta are not calculated until - z is indexed, so a netCDF variable for zeta may be passed, even if the - file is large, as only the values that are required will be retrieved - from the file.

-
-
-
-
- -
- -
- - -
-

get_ROMS_grid(gridid, - zeta=None, - hist_file=None, - grid_file=None) -

-
source code  -
- -
-
-grd = get_ROMS_grid(gridid,hist_file=None,grid_file=None)
-
-Load ROMS grid object.
-
-gridid is a string with the name of the grid in it.  If hist_file
-   and grid_file are not passed into the function, or are set to
-   None, then gridid is used to get the grid data from the
-   gridid.txt file.
-
-   If hist_file and grid_file are given, and they are the file
-   paths to a ROMS history file and grid file respectively, the
-   grid information will be extracted from those files, and gridid
-   will be used to name that grid for the rest of the python
-   session.
-
-grd.vgrid is an s_coordinate or
-a z_coordinate object, depending on gridid.grdtype.
-grd.vgrid.z_r and grd.vgrid.z_w (grd.vgrid.z for a 
-z_coordinate object) can be indexed in order to retrieve the 
-actual depths. The free surface time series zeta can be provided 
-as an optional argument. Note that the values of zeta are not 
-calculated until z is indexed, so a netCDF variable for zeta may 
-be passed, even if the file is large, as only the values that 
-are required will be retrieved from the file.
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.grid-pysrc.html b/pyroms/docs/pyroms.grid-pysrc.html deleted file mode 100644 index bcf3e15..0000000 --- a/pyroms/docs/pyroms.grid-pysrc.html +++ /dev/null @@ -1,700 +0,0 @@ - - - - - pyroms.grid - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module grid - - - - - - -
[hide private]
[frames] | no frames]
-
-

Source Code for Module pyroms.grid

-
-  1  # encoding: utf-8 
-  2   
-  3  import sys 
-  4  import os 
-  5  import numpy as np 
-  6  from mpl_toolkits.basemap import Basemap 
-  7  from datetime import datetime 
-  8  try: 
-  9    import netCDF4 as netCDF 
- 10  except: 
- 11    import netCDF3 as netCDF 
- 12   
- 13  import pyroms 
- 14  from pyroms.hgrid import * 
- 15  from pyroms.vgrid import * 
- 16  from pyroms.grid import * 
- 17  from pyroms import io 
- 18   
- 19  #define a dictionary that will remember gridid's that are defined from 
- 20  #a history and grid file. Because this is defined in this models name 
- 21  #space, it will remain persistent.  They keys are the gridid, and the 
- 22  #values are ROMS_gridinfo objects. 
- 23  gridid_dictionary={} 
- 24   
-
25 -class ROMS_Grid(object): -
26 """ - 27 grd = ROMS_Grid(hgrid, vgrid) - 28 - 29 ROMS Grid object combining horizontal and vertical grid - 30 """ - 31 -
32 - def __init__(self, name, hgrid=CGrid, vgrid=s_coordinate): -
33 self.name = name - 34 self.hgrid = hgrid - 35 self.vgrid = vgrid -
36 - 37 -
38 -class ROMS_gridinfo(object): -
39 ''' - 40 gridinfo = ROMS_gridinfo(gridid,grid_file=None,hist_file=None) - 41 - 42 Return an object with grid information for gridid. - 43 - 44 There are two ways to define the grid information. If grid_file - 45 and hist_file are not passed to the object when it is created, the - 46 information is retrieved from gridid.txt. - 47 To add new grid please edit your gridid.txt. You need to define - 48 an environment variable PYROMS_GRIDID_FILE pointing to your - 49 gridid.txt file. Just copy an existing grid and modify the - 50 definition accordingly to your case (Be carefull with - 51 space and blank line). - 52 - 53 If grid_file is the path to a ROMS grid file, and hist_file is the - 54 path to a ROMS history file, then the grid information will be - 55 read from those files. Gridid can then be used to refer to this - 56 grid information so that the grid and history files do not be - 57 included in subsequent calls. - 58 ''' - 59 -
60 - def __init__(self, gridid,grid_file=None,hist_file=None): -
61 #first determine if the information for the gridid has already been obtained. - 62 if gridid in gridid_dictionary: - 63 #print 'CJMP> gridid found in gridid_dictionary, grid retrieved from dictionary' - 64 saved_self=gridid_dictionary[gridid] - 65 for attrib in saved_self.__dict__.keys(): - 66 setattr(self,attrib,getattr(saved_self,attrib)) - 67 else: - 68 #nope, we need to get the information from gridid.txt or from - 69 #the grid and history files from the model - 70 self.id = gridid - 71 self._get_grid_info(grid_file,hist_file) - 72 - 73 #now save the data in the dictionary, so we don't need to get it again - 74 gridid_dictionary[gridid]=self -
75 -
76 - def _get_grid_info(self,grid_file,hist_file): -
77 - 78 #check if the grid_file and hist_files are both null; if so get data from gridid.txt - 79 if (type(grid_file)==type(None))&(type(hist_file)==type(None)): - 80 #print 'CJMP> gridid not in dictionary, data will be retrieved from gridid.txt' - 81 gridid_file = os.getenv("PYROMS_GRIDID_FILE") - 82 data = open(gridid_file,'r') - 83 lines = data.readlines() - 84 data.close() - 85 - 86 line_nb = 0 - 87 info = [] - 88 for line in lines: - 89 s = line.split() - 90 if s[0] == 'id': - 91 if s[2] == self.id: - 92 for l in range(line_nb, line_nb+5): - 93 s = lines[l].split() - 94 info.append(s[2]) - 95 line_nb = line_nb + 1 - 96 if info[4] == 'roms': - 97 for l in range(line_nb, line_nb+4): - 98 s = lines[l].split() - 99 info.append(s[2]) -100 if info[4] == 'z': -101 s = lines[line_nb].split() -102 info.append(s[3:-1]) -103 while s[-1:] == ['\\']: -104 line_nb = line_nb + 1 -105 s = lines[line_nb].split() -106 info.append(s[:-1]) -107 line_nb = line_nb + 1 -108 -109 if info == []: -110 raise ValueError, 'Unknow gridid. Please check your gridid.txt file' -111 -112 if info[4] == 'roms': -113 self.name = info[1] -114 self.grdfile = info[2] -115 self.N = np.int(info[3]) -116 self.grdtype = info[4] -117 self.Vtrans = np.int(info[5]) -118 self.theta_s = np.float(info[6]) -119 self.theta_b = np.float(info[7]) -120 self.Tcline = np.float(info[8]) -121 -122 elif info[4] == 'z': -123 nline = len(info) -124 dep = info[5] -125 for line in range(6,nline): -126 dep = dep + info[line] -127 dep = np.array(dep, dtype=np.float) -128 -129 self.name = info[1] -130 self.grdfile = info[2] -131 self.N = np.int(info[3]) -132 self.grdtype = info[4] -133 self.depth = dep -134 -135 else: -136 raise ValueError, 'Unknow grid type. Please check your gridid.txt file' -137 -138 else: #lets get the grid information from the history and grid files -139 #print 'CJMP> getting grid info from ROMS history and grid files' -140 assert type(grid_file)!=type(None), 'if specify history file you must specify grid file' -141 assert type(hist_file)!=type(None), 'if specify grid file you must specify history file' -142 -143 #open history file and get necessary grid information from it. -144 hist=netCDF.Dataset(hist_file,'r') -145 -146 #put data into ROMS_gridinfo object -147 self.name=self.id -148 self.grdfile=grid_file -149 self.N=len(hist.dimensions['s_rho']) -150 self.grdtype='roms' -151 -152 #now write this to deal with both ROMS 3 and 2 -153 try: -154 self.Vtrans=np.float(hist.Vtransform) -155 self.theta_s=np.float(hist.theta_s) -156 self.theta_b=np.float(hist.theta_b) -157 self.Tcline=np.float(hist.Tcline) -158 except AttributeError: -159 try: -160 self.Vtrans=np.float(hist.variables['Vtransform'][:]) -161 except: -162 print 'variable Vtransform not found in history file. Defaulting to Vtranform=1' -163 self.Vtrans=1 -164 self.theta_s=np.float(hist.variables['theta_s'][:]) -165 self.theta_b=np.float(hist.variables['theta_b'][:]) -166 self.Tcline=np.float(hist.variables['Tcline'][:]) -
167 -168 -192 -193 -
194 -def list_ROMS_gridid(): -
195 """ -196 list_ROMS_gridid() -197 -198 return the list of the defined gridid -199 """ -200 -201 gridid_file = os.getenv("PYROMS_GRIDID_FILE") -202 data = open(gridid_file,'r') -203 lines = data.readlines() -204 data.close() -205 -206 gridid_list = [] -207 for line in lines: -208 s = line.split() -209 if s[0] == 'id': -210 gridid_list.append(s[2]) -211 -212 print 'List of defined gridid : ', gridid_list -
213 -214 -
215 -def get_ROMS_hgrid(gridid): -
216 """ -217 hgrid = get_ROMS_hgrid(gridid) -218 -219 Load ROMS horizontal grid object -220 """ -221 -222 gridinfo = ROMS_gridinfo(gridid) -223 grdfile = gridinfo.grdfile -224 -225 nc = io.Dataset(grdfile) -226 -227 #Check for cartesian or geographical grid -228 spherical = nc.variables['spherical'][:] -229 -230 #Get horizontal grid -231 if spherical == 'F': -232 #cartesian grid -233 print 'Load cartesian grid from file' -234 if 'x_vert' in nc.variables.keys() and 'y_vert' in nc.variables.keys(): -235 x_vert = nc.variables['x_vert'][:] -236 y_vert = nc.variables['y_vert'][:] -237 elif 'x_rho' in nc.variables.keys() and 'y_rho' in nc.variables.keys() \ -238 and 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): -239 x_rho = nc.variables['x_rho'][:] -240 y_rho = nc.variables['y_rho'][:] -241 pm = nc.variables['pm'][:] -242 pn = nc.variables['pn'][:] -243 try: angle = nc.variables['angle'][:] -244 except: angle = np.zeros(x_rho.shape) -245 #compute verts from rho point, pm, pn, angle -246 x_vert, y_vert = rho_to_vert(x_rho, y_rho, pm, pn, angle) -247 else: -248 raise ValueError, 'NetCDF file must contain x_vert and y_vert \ -249 or x_rho, y_rho, pm, pn and angle for a cartesian grid' -250 -251 if 'x_rho' in nc.variables.keys() and 'y_rho' in nc.variables.keys() and \ -252 'x_u' in nc.variables.keys() and 'y_u' in nc.variables.keys() and \ -253 'x_v' in nc.variables.keys() and 'y_v' in nc.variables.keys() and \ -254 'x_psi' in nc.variables.keys() and 'y_psi' in nc.variables.keys(): -255 x_rho = nc.variables['x_rho'][:] -256 y_rho = nc.variables['y_rho'][:] -257 x_u = nc.variables['x_u'][:] -258 y_u = nc.variables['y_u'][:] -259 x_v = nc.variables['x_v'][:] -260 y_v = nc.variables['y_v'][:] -261 x_psi = nc.variables['x_psi'][:] -262 y_psi = nc.variables['y_psi'][:] -263 else: -264 x_rho = None -265 y_rho = None -266 x_u = None -267 y_u = None -268 x_v = None -269 y_v = None -270 x_psi = None -271 y_psi = None -272 -273 if 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): -274 pm = nc.variables['pm'][:] -275 dx = 1. / pm -276 pn = nc.variables['pn'][:] -277 dy = 1. 
/ pn -278 else: -279 dx = None -280 dy = None -281 -282 if 'dndx' in nc.variables.keys() and 'dmde' in nc.variables.keys(): -283 dndx = nc.variables['dndx'][:] -284 dmde = nc.variables['dmde'][:] -285 else: -286 dndx = None -287 dmde = None -288 -289 if 'angle' in nc.variables.keys(): -290 angle = nc.variables['angle'][:] -291 else: -292 angle = None -293 -294 #Get cartesian grid -295 hgrd = CGrid(x_vert, y_vert, x_rho=x_rho, y_rho=y_rho, \ -296 x_u=x_u, y_u=y_u, x_v=x_v, y_v=y_v, \ -297 x_psi=x_psi, y_psi=y_psi, dx=dx, dy=dy, \ -298 dndx=dndx, dmde=dmde, angle_rho=angle) -299 -300 else: -301 #geographical grid -302 print 'Load geographical grid from file' -303 proj = Basemap(projection='merc', resolution=None, lat_0=0, lon_0=0) -304 if 'lon_vert' in nc.variables.keys() and 'lat_vert' in nc.variables.keys(): -305 lon_vert = nc.variables['lon_vert'][:] -306 lat_vert = nc.variables['lat_vert'][:] -307 elif 'lon_rho' in nc.variables.keys() and 'lon_rho' in nc.variables.keys() \ -308 and 'lon_psi' in nc.variables.keys() and 'lat_psi' in nc.variables.keys(): -309 lon_rho = nc.variables['lon_rho'][:] -310 lat_rho = nc.variables['lat_rho'][:] -311 lon_psi = nc.variables['lon_psi'][:] -312 lat_psi = nc.variables['lat_psi'][:] -313 #compute verts from rho and psi point -314 lon_vert, lat_vert = rho_to_vert_geo(lon_rho, lat_rho, lon_psi, lat_psi) -315 else: -316 raise ValueError, 'NetCDF file must contain lon_vert and lat_vert \ -317 or lon_rho, lat_rho, lon_psi, lat_psi for a geographical grid' -318 -319 if 'lon_rho' in nc.variables.keys() and 'lat_rho' in nc.variables.keys() and \ -320 'lon_u' in nc.variables.keys() and 'lat_u' in nc.variables.keys() and \ -321 'lon_v' in nc.variables.keys() and 'lat_v' in nc.variables.keys() and \ -322 'lon_psi' in nc.variables.keys() and 'lat_psi' in nc.variables.keys(): -323 lon_rho = nc.variables['lon_rho'][:] -324 lat_rho = nc.variables['lat_rho'][:] -325 lon_u = nc.variables['lon_u'][:] -326 lat_u = nc.variables['lat_u'][:] -327 lon_v = nc.variables['lon_v'][:] -328 lat_v = nc.variables['lat_v'][:] -329 lon_psi = nc.variables['lon_psi'][:] -330 lat_psi = nc.variables['lat_psi'][:] -331 else: -332 lon_rho = None -333 lat_rho = None -334 lon_u = None -335 lat_u = None -336 lon_v = None -337 lat_v = None -338 lon_psi = None -339 lat_psi = None -340 -341 if 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): -342 pm = nc.variables['pm'][:] -343 dx = 1. / pm -344 pn = nc.variables['pn'][:] -345 dy = 1. / pn -346 else: -347 dx = None -348 dy = None -349 -350 if 'dndx' in nc.variables.keys() and 'dmde' in nc.variables.keys(): -351 dndx = nc.variables['dndx'][:] -352 dmde = nc.variables['dmde'][:] -353 else: -354 dndx = None -355 dmde = None -356 -357 if 'angle' in nc.variables.keys(): -358 angle = nc.variables['angle'][:] -359 else: -360 angle = None -361 -362 #Get geographical grid -363 hgrd = CGrid_geo(lon_vert, lat_vert, proj, \ -364 lon_rho=lon_rho, lat_rho=lat_rho, \ -365 lon_u=lon_u, lat_u=lat_u, lon_v=lon_v, lat_v=lat_v, \ -366 lon_psi=lon_psi, lat_psi=lat_psi, dx=dx, dy=dy, \ -367 dndx=dndx, dmde=dmde, angle_rho=angle) -368 -369 #load the mask -370 try: -371 hgrd.mask_rho = np.array(nc.variables['mask_rho'][:]) -372 except: -373 hgrd.mask_rho = np.ones(hgrd.mask_rho.shape) -374 -375 return hgrd -
376 -377 -
378 -def get_ROMS_vgrid(gridid, zeta=None): -
379 """ -380 vgrid = get_ROMS_vgrid(gridid) -381 -382 Load ROMS vertical grid object. vgrid is a s_coordinate or -383 a z_coordinate object, depending on gridid.grdtype. -384 vgrid.z_r and vgrid.z_w (vgrid.z for a z_coordinate object) -385 can be indexed in order to retreive the actual depths. The -386 free surface time serie zeta can be provided as an optional -387 argument. Note that the values of zeta are not calculated -388 until z is indexed, so a netCDF variable for zeta may be passed, -389 even if the file is large, as only the values that are required -390 will be retrieved from the file. -391 """ -392 -393 gridinfo = ROMS_gridinfo(gridid) -394 grdfile = gridinfo.grdfile -395 -396 nc = io.Dataset(grdfile) -397 -398 #Get vertical grid -399 try: -400 h = nc.variables['h'][:] -401 except: -402 raise ValueError, 'NetCDF file must contain the bathymetry h' -403 -404 if gridinfo.grdtype == 'roms': -405 Vtrans = gridinfo.Vtrans -406 theta_b = gridinfo.theta_b -407 theta_s = gridinfo.theta_s -408 Tcline = gridinfo.Tcline -409 N = gridinfo.N -410 if Vtrans == 1: -411 vgrid = s_coordinate(h, theta_b, theta_s, Tcline, N, zeta=zeta) -412 elif Vtrans == 2: -413 vgrid = s_coordinate_2(h, theta_b, theta_s, Tcline, N, zeta=zeta) -414 else: -415 raise Warning, 'Unknow vertical transformation Vtrans' -416 -417 elif gridinfo.grdtype == 'z': -418 N = gridinfo.N -419 depth = gridinfo.depth -420 vgrid = z_coordinate(h, depth, N) -421 -422 else: -423 raise ValueError, 'Unknow grid type' -424 -425 return vgrid -
426 -427 -
428 -def get_ROMS_grid(gridid, zeta=None, hist_file=None,grid_file=None): -
429 """ -430 grd = get_ROMS_grid(gridid,hist_file=None,grid_file=None) -431 -432 Load ROMS grid object. -433 -434 gridid is a string with the name of the grid in it. If hist_file -435 and grid_file are not passed into the function, or are set to -436 None, then gridid is used to get the grid data from the -437 gridid.txt file. -438 -439 if hist_file and grid_file are given, and they are the file -440 paths to a ROMS history file and grid file respectively, the -441 grid information will be extracted from those files, and gridid -442 will be used to name that grid for the rest of the python -443 session. -444 -445 grd.vgrid is a s_coordinate or -446 a z_coordinate object, depending on gridid.grdtype. -447 grd.vgrid.z_r and grd.vgrid.z_w (grd.vgrid.z for a -448 z_coordinate object) can be indexed in order to retreive the -449 actual depths. The free surface time serie zeta can be provided -450 as an optional argument. Note that the values of zeta are not -451 calculated until z is indexed, so a netCDF variable for zeta may -452 be passed, even if the file is large, as only the values that -453 are required will be retrieved from the file. -454 """ -455 -456 #in this first call to ROMS_gridinfo, we pass in the history file -457 #and gridfile info. If hist_file and grid_file are defined, the -458 #grid info will be extracted from those files and will able to be -459 #accessed later by gridid -460 gridinfo = ROMS_gridinfo(gridid,hist_file=hist_file,grid_file=grid_file) -461 name = gridinfo.name -462 -463 #we need not pass in hist_file and grid_file here, because the -464 #gridinfo file will already have been initialized by the call to -465 #ROMS_gridinfo above. -466 hgrd = get_ROMS_hgrid(gridid) -467 vgrid = get_ROMS_vgrid(gridid, zeta=zeta) -468 -469 #Get ROMS grid -470 return ROMS_Grid(name, hgrd, vgrid) -
471 -472 -
473 -def write_ROMS_grid(grd, filename='ocean_grd.nc'): -
474 """ -475 write_ROMS_grid(grd, filename) -476 -477 Write ROMS_CGrid class on a NetCDF file. -478 """ -479 -480 if grd.hgrid.lon_rho is not None: -481 Mm, Lm = grd.hgrid.lon_rho.shape -482 else: -483 Mm, Lm = grd.hgrid.x_rho.shape -484 -485 -486 # Write ROMS grid to file -487 nc = netCDF.Dataset(filename, 'w', format='NETCDF3_CLASSIC') -488 nc.Description = 'ROMS grid' -489 nc.Author = 'pyroms.grid.write_grd' -490 nc.Created = datetime.now().isoformat() -491 nc.type = 'ROMS grid file' -492 -493 nc.createDimension('xi_rho', Lm) -494 nc.createDimension('xi_u', Lm-1) -495 nc.createDimension('xi_v', Lm) -496 nc.createDimension('xi_psi', Lm-1) -497 -498 nc.createDimension('eta_rho', Mm) -499 nc.createDimension('eta_u', Mm) -500 nc.createDimension('eta_v', Mm-1) -501 nc.createDimension('eta_psi', Mm-1) -502 -503 if grd.hgrid.x_vert is not None: -504 nc.createDimension('xi_vert', Lm+1) -505 nc.createDimension('eta_vert', Mm+1) -506 -507 if hasattr(grd.vgrid, 's_rho') is True and grd.vgrid.s_rho is not None: -508 N, = grd.vgrid.s_rho.shape -509 nc.createDimension('s_rho', N) -510 nc.createDimension('s_w', N+1) -511 -512 -513 def write_nc_var(var, name, dimensions, long_name=None, units=None): -514 nc.createVariable(name, 'f8', dimensions) -515 if long_name is not None: -516 nc.variables[name].long_name = long_name -517 if units is not None: -518 nc.variables[name].units = units -519 nc.variables[name][:] = var -520 print ' ... wrote ', name -
521 -522 if hasattr(grd.vgrid, 's_rho') is True and grd.vgrid.s_rho is not None: -523 write_nc_var(grd.vgrid.theta_s, 'theta_s', (), 'S-coordinate surface control parameter') -524 write_nc_var(grd.vgrid.theta_b, 'theta_b', (), 'S-coordinate bottom control parameter') -525 write_nc_var(grd.vgrid.Tcline, 'Tcline', (), 'S-coordinate surface/bottom layer width', 'meter') -526 write_nc_var(grd.vgrid.hc, 'hc', (), 'S-coordinate parameter, critical depth', 'meter') -527 write_nc_var(grd.vgrid.s_rho, 's_rho', ('s_rho'), 'S-coordinate at RHO-points') -528 write_nc_var(grd.vgrid.s_w, 's_w', ('s_w'), 'S-coordinate at W-points') -529 write_nc_var(grd.vgrid.Cs_r, 'Cs_r', ('s_rho'), 'S-coordinate stretching curves at RHO-points') -530 write_nc_var(grd.vgrid.Cs_w, 'Cs_w', ('s_w'), 'S-coordinate stretching curves at W-points') -531 -532 write_nc_var(grd.vgrid.h, 'h', ('eta_rho', 'xi_rho'), 'bathymetry at RHO-points', 'meter') -533 write_nc_var(grd.hgrid.f, 'f', ('eta_rho', 'xi_rho'), 'Coriolis parameter at RHO-points', 'second-1') -534 write_nc_var(1./grd.hgrid.dx, 'pm', ('eta_rho', 'xi_rho'), 'curvilinear coordinate metric in XI', 'meter-1') -535 write_nc_var(1./grd.hgrid.dy, 'pn', ('eta_rho', 'xi_rho'), 'curvilinear coordinate metric in ETA', 'meter-1') -536 write_nc_var(grd.hgrid.dmde, 'dmde', ('eta_rho', 'xi_rho'), 'XI derivative of inverse metric factor pn', 'meter') -537 write_nc_var(grd.hgrid.dndx, 'dndx', ('eta_rho', 'xi_rho'), 'ETA derivative of inverse metric factor pm', 'meter') -538 write_nc_var(grd.hgrid.xl, 'xl', (), 'domain length in the XI-direction', 'meter') -539 write_nc_var(grd.hgrid.el, 'el', (), 'domain length in the ETA-direction', 'meter') -540 -541 if grd.hgrid.x_rho is not None: -542 write_nc_var(grd.hgrid.x_rho, 'x_rho', ('eta_rho', 'xi_rho'), 'x location of RHO-points', 'meter') -543 write_nc_var(grd.hgrid.x_rho, 'y_rho', ('eta_rho', 'xi_rho'), 'y location of RHO-points', 'meter') -544 write_nc_var(grd.hgrid.x_u, 'x_u', ('eta_u', 'xi_u'), 'x location of U-points', 'meter') -545 write_nc_var(grd.hgrid.x_u, 'y_u', ('eta_u', 'xi_u'), 'y location of U-points', 'meter') -546 write_nc_var(grd.hgrid.x_v, 'x_v', ('eta_v', 'xi_v'), 'x location of V-points', 'meter') -547 write_nc_var(grd.hgrid.x_v, 'y_v', ('eta_v', 'xi_v'), 'y location of V-points', 'meter') -548 write_nc_var(grd.hgrid.x_psi, 'x_psi', ('eta_psi', 'xi_psi'), 'x location of PSI-points', 'meter') -549 write_nc_var(grd.hgrid.x_psi, 'y_psi', ('eta_psi', 'xi_psi'), 'y location of PSI-points', 'meter') -550 -551 if grd.hgrid.lon_rho is not None: -552 write_nc_var(grd.hgrid.lon_rho, 'lon_rho', ('eta_rho', 'xi_rho'), 'longitude of RHO-points', 'degree_east') -553 write_nc_var(grd.hgrid.lat_rho, 'lat_rho', ('eta_rho', 'xi_rho'), 'latitude of RHO-points', 'degree_north') -554 write_nc_var(grd.hgrid.lon_u, 'lon_u', ('eta_u', 'xi_u'), 'longitude of U-points', 'degree_east') -555 write_nc_var(grd.hgrid.lat_u, 'lat_u', ('eta_u', 'xi_u'), 'latitude of U-points', 'degree_north') -556 write_nc_var(grd.hgrid.lon_v, 'lon_v', ('eta_v', 'xi_v'), 'longitude of V-points', 'degree_east') -557 write_nc_var(grd.hgrid.lat_v, 'lat_v', ('eta_v', 'xi_v'), 'latitude of V-points', 'degree_north') -558 write_nc_var(grd.hgrid.lon_psi, 'lon_psi', ('eta_psi', 'xi_psi'), 'longitude of PSI-points', 'degree_east') -559 write_nc_var(grd.hgrid.lat_psi, 'lat_psi', ('eta_psi', 'xi_psi'), 'latitude of PSI-points', 'degree_north') -560 -561 if grd.hgrid.x_vert is not None: -562 write_nc_var(grd.hgrid.x_vert, 'x_vert', ('eta_vert', 'xi_vert'), 'x location of cell 
verticies', 'meter') -563 write_nc_var(grd.hgrid.y_vert, 'y_vert', ('eta_vert', 'xi_vert'), 'y location of cell verticies', 'meter') -564 -565 if grd.hgrid.lon_vert is not None: -566 write_nc_var(grd.hgrid.lon_vert, 'lon_vert', ('eta_vert', 'xi_vert'), 'longitude of cell verticies', 'degree_east') -567 write_nc_var(grd.hgrid.lat_vert, 'lat_vert', ('eta_vert', 'xi_vert'), 'latitude of cell verticies', 'degree_north') -568 -569 nc.createVariable('spherical', 'c') -570 nc.variables['spherical'].long_name = 'Grid type logical switch' -571 nc.variables['spherical'][:] = grd.hgrid.spherical -572 print ' ... wrote ', 'spherical' -573 -574 write_nc_var(grd.hgrid.angle_rho, 'angle', ('eta_rho', 'xi_rho'), 'angle between XI-axis and EAST', 'radians') -575 -576 write_nc_var(grd.hgrid.mask_rho, 'mask_rho', ('eta_rho', 'xi_rho'), 'mask on RHO-points') -577 write_nc_var(grd.hgrid.mask_u, 'mask_u', ('eta_u', 'xi_u'), 'mask on U-points') -578 write_nc_var(grd.hgrid.mask_v, 'mask_v', ('eta_v', 'xi_v'), 'mask on V-points') -579 write_nc_var(grd.hgrid.mask_psi, 'mask_psi', ('eta_psi', 'xi_psi'), 'mask on psi-points') -580 -581 nc.close() -582 -
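A short sketch of writing a grid back out with write_ROMS_grid, again assuming the pyroms.grid module path; the function produces a NETCDF3_CLASSIC file, and the names here are hypothetical:

    import pyroms

    grd = pyroms.grid.get_ROMS_grid('YELLOW')                      # hypothetical gridid
    pyroms.grid.write_ROMS_grid(grd, filename='YELLOW_grd_v2.nc')  # hypothetical output name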
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.grid.ROMS_Grid-class.html b/pyroms/docs/pyroms.grid.ROMS_Grid-class.html deleted file mode 100644 index 8fa9b59..0000000 --- a/pyroms/docs/pyroms.grid.ROMS_Grid-class.html +++ /dev/null @@ -1,240 +0,0 @@ - - - - - pyroms.grid.ROMS_Grid - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module grid :: - Class ROMS_Grid - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class ROMS_Grid

source code

-
-object --+
-         |
-        ROMS_Grid
-
- -
-

grd = ROMS_Grid(hgrid, vgrid)

-

ROMS Grid object combining horizontal and vertical grid

- - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - name, - hgrid=<class 'pyroms.hgrid.CGrid'>, - vgrid=<class 'pyroms.vgrid.s_coordinate'>)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
Properties
-
-

Inherited from object: - __class__ -

-
Method Details
-
- -
- -
- - -
-

__init__(self, - name, - hgrid=<class 'pyroms.hgrid.CGrid'>, - vgrid=<class 'pyroms.vgrid.s_coordinate'>) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
diff --git a/pyroms/docs/pyroms.grid.ROMS_gridinfo-class.html b/pyroms/docs/pyroms.grid.ROMS_gridinfo-class.html
deleted file mode 100644
index 3867d67..0000000
--- a/pyroms/docs/pyroms.grid.ROMS_gridinfo-class.html
+++ /dev/null
@@ -1,270 +0,0 @@
- pyroms.grid.ROMS_gridinfo
- - Package pyroms :: - Module grid :: - Class ROMS_gridinfo - - - - - - -
-
- -

Class ROMS_gridinfo

source code

-
-object --+
-         |
-        ROMS_gridinfo
-
- -
-

gridinfo = ROMS_gridinfo(gridid,grid_file=None,hist_file=None)

-

Return an object with grid information for gridid.

-

There are two ways to define the grid information. If grid_file and hist_file are not passed to the object when it is created, the information is retrieved from gridid.txt. To add a new grid, edit your gridid.txt. You need to define an environment variable PYROMS_GRIDID_FILE pointing to your gridid.txt file. Just copy an existing grid and modify the definition according to your case (be careful with spaces and blank lines).

-

If grid_file is the path to a ROMS grid file, and hist_file is the path to a ROMS history file, then the grid information will be read from those files. The gridid can then be used to refer to this grid information, so that the grid and history files do not need to be included in subsequent calls.

- - - - - -
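A quick illustration of the two modes described above (the grid id and the file names are made-up placeholders):

    import os
    import pyroms

    # Mode 1: look the grid up in gridid.txt via the PYROMS_GRIDID_FILE variable
    os.environ['PYROMS_GRIDID_FILE'] = '/path/to/gridid.txt'
    info = pyroms.grid.ROMS_gridinfo('MYGRID')

    # Mode 2: read the information directly from a grid file and a history file;
    # afterwards the id 'MYGRID' alone is enough to refer to this grid again
    info = pyroms.grid.ROMS_gridinfo('MYGRID',
                                     grid_file='my_grd.nc',
                                     hist_file='my_his.nc')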
Instance Methods
-
-   - - - - - - -
__init__(self, - gridid, - grid_file=None, - hist_file=None)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
_get_grid_info(self, - grid_file, - hist_file) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - -
Properties
-
-

Inherited from object: - __class__ -

-
- - - - - -
Method Details
-
- -
- -
- - -
-

__init__(self, - gridid, - grid_file=None, - hist_file=None) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
diff --git a/pyroms/docs/pyroms.hgrid-module.html b/pyroms/docs/pyroms.hgrid-module.html
deleted file mode 100644
index 047d3a4..0000000
--- a/pyroms/docs/pyroms.hgrid-module.html
+++ /dev/null
@@ -1,367 +0,0 @@
- pyroms.hgrid
- - Package pyroms :: - Module hgrid - - - - - - -
-
- -

Module hgrid

source code

-
-Tools for creating and working with Arakawa C-Grids
-
-
- - - - - -
Classes
-
-   - - BoundaryInteractor
- Interactive grid creation -
-   - - _Focus_x
- Return a transformed, uniform grid, focused in the x-direction -
-   - - _Focus_y
- Return a transformed, uniform grid, focused in the y-direction -
-   - - Focus
- Return a container for a sequence of Focus objects -
-   - - CGrid
- Curvilinear Arakawa C-Grid -
-   - - CGrid_geo
- Curvilinear Arakawa C-grid defined in geographic coordinates -
-   - - Gridgen
- docstring for Gridgen -
-   - - edit_mask_mesh
- Interactive mask editor -
-   - - get_position_from_map
- Get cell index position Interactively -
- - - - - -
Functions
-
-   - - - - - - -
_approximate_erf(x)
- Return approximate solution to error function...
- source code - -
- -
-   - - - - - - -
rho_to_vert(xr, - yr, - pm, - pn, - ang) - source code - -
- -
-   - - - - - - -
rho_to_vert_geo(lonr, - latr, - lonp, - latp) - source code - -
- -
-   - - - - - - -
uvp_masks(rmask)
- return u-, v-, and psi-masks based on input rho-mask
- source code - -
- -
- - - - - -
Function Details
-
- -
- -
- - -
-

_approximate_erf(x) -

-
source code  -
- -
-
-Return approximate solution to error function
-see http://en.wikipedia.org/wiki/Error_function
-
-
-
-
-
-
- -
- -
- - -
-

uvp_masks(rmask) -

-
source code  -
- -
-
-return u-, v-, and psi-masks based on input rho-mask
-
-Parameters
-----------
-
-rmask : ndarray
-    mask at CGrid rho-points
-
-Returns
--------
-(umask, vmask, pmask) : ndarrays
-    masks at u-, v-, and psi-points
-
-
-
-
-
-
-
diff --git a/pyroms/docs/pyroms.hgrid-pysrc.html b/pyroms/docs/pyroms.hgrid-pysrc.html
deleted file mode 100644
index 363a7b3..0000000
--- a/pyroms/docs/pyroms.hgrid-pysrc.html
+++ /dev/null
@@ -1,2552 +0,0 @@
- pyroms.hgrid
- - Package pyroms :: - Module hgrid - - - - - - -
-
-

Source Code for Module pyroms.hgrid

-
-   1  # encoding: utf-8 
-   2  '''Tools for creating and working with Arakawa C-Grids''' 
-   3  __docformat__ = "restructuredtext en" 
-   4   
-   5  import os 
-   6  import sys 
-   7  import ctypes 
-   8  import cPickle 
-   9  from warnings import warn 
-  10  from copy import deepcopy 
-  11   
-  12  import numpy as np 
-  13  import matplotlib.pyplot as plt 
-  14  from matplotlib.artist import Artist 
-  15  from matplotlib.patches import Polygon, CirclePolygon 
-  16  from matplotlib.lines import Line2D 
-  17  #from matplotlib.numerix.mlab import amin 
-  18  from matplotlib.mlab import dist_point_to_segment 
-  19  from matplotlib.nxutils import points_inside_poly 
-  20   
-  21  from mpl_toolkits.basemap import Basemap 
-  22  from mpl_toolkits.basemap import pyproj 
-  23   
-  24  try: 
-  25      import scipy.spatial.cKDTree as KDTree 
-  26  except: 
-  27      #  no scipy 
-  28      from pyroms.extern import KDTree 
-  29   
-  30  import pyroms 
-  31  from pyroms.vgrid import * 
-  32  from pyroms.extern import GreatCircle 
-  33   
-
34 -class BoundaryInteractor(object): -
35 """ - 36 Interactive grid creation - 37 - 38 bry = BoundaryClick(x=[], y=[], beta=None, ax=gca(), **gridgen_options) - 39 - 40 The initial boundary polygon points (x and y) are - 41 counterclockwise, starting in the upper left corner of the - 42 boundary. - 43 - 44 Key commands: - 45 - 46 t : toggle visibility of verticies - 47 d : delete a vertex - 48 i : insert a vertex at a point on the polygon line - 49 - 50 p : set vertex as beta=1 (a Positive turn, marked with green triangle) - 51 m : set vertex as beta=1 (a Negative turn, marked with red triangle) - 52 z : set vertex as beta=0 (no corner, marked with a black dot) - 53 - 54 G : generate grid from the current boundary using gridgen - 55 T : toggle visability of the current grid - 56 - 57 Methods: - 58 - 59 bry.dump(bry_file) - 60 Write the current boundary informtion (bry.x, bry.y, bry.beta) to - 61 a cPickle file bry_file. - 62 - 63 bry.load(bry_file) - 64 Read in boundary informtion (x, y, beta) from the cPickle file - 65 bry_file. - 66 - 67 bry.remove_grid() - 68 Remove gridlines from axes. - 69 - 70 Attributes: - 71 bry.x : the X boundary points - 72 bry.y : the Y boundary points - 73 bry.verts : the verticies of the grid - 74 bry.grd : the CGrid object - 75 - 76 """ - 77 - 78 _showverts = True - 79 _showbetas = True - 80 _showgrid = True - 81 _epsilon = 5 # max pixel distance to count as a vertex hit - 82 -
83 - def _update_beta_lines(self): -
84 """Update m/pline by finding the points where self.beta== -/+ 1""" - 85 x, y = zip(*self._poly.xy) - 86 num_points = len(x)-1 # the first and last point are repeated - 87 - 88 xp = [x[n] for n in range(num_points) if self.beta[n]==1] - 89 yp = [y[n] for n in range(num_points) if self.beta[n]==1] - 90 self._pline.set_data(xp, yp) - 91 - 92 xm = [x[n] for n in range(num_points) if self.beta[n]==-1] - 93 ym = [y[n] for n in range(num_points) if self.beta[n]==-1] - 94 self._mline.set_data(xm, ym) - 95 - 96 xz = [x[n] for n in range(num_points) if self.beta[n]==0] - 97 yz = [y[n] for n in range(num_points) if self.beta[n]==0] - 98 self._zline.set_data(xz, yz) - 99 - 100 if len(x)-1 < self.gridgen_options['ul_idx']: - 101 self.gridgen_options['ul_idx'] = len(x)-1 - 102 xs = x[self.gridgen_options['ul_idx']] - 103 ys = y[self.gridgen_options['ul_idx']] - 104 self._sline.set_data(xs, ys) -
105 -
106 - def remove_grid(self): -
107 """Remove a generated grid from the BoundaryClick figure""" - 108 if hasattr(self, '_gridlines'): - 109 for line in self._gridlines: - 110 self._ax.lines.remove(line) - 111 delattr(self, '_gridlines') -
112 -
113 - def _draw_callback(self, event): -
114 self._background = self._canvas.copy_from_bbox(self._ax.bbox) - 115 self._ax.draw_artist(self._poly) - 116 self._ax.draw_artist(self._pline) - 117 self._ax.draw_artist(self._mline) - 118 self._ax.draw_artist(self._zline) - 119 self._ax.draw_artist(self._sline) - 120 self._ax.draw_artist(self._line) - 121 self._canvas.blit(self._ax.bbox) -
122 -
123 - def _poly_changed(self, poly): -
124 'this method is called whenever the polygon object is called' - 125 # only copy the artist props to the line (except visibility) - 126 vis = self._line.get_visible() - 127 Artist.update_from(self._line, poly) - 128 self._line.set_visible(vis) # don't use the poly visibility state -
129 -
130 - def _get_ind_under_point(self, event): -
131 'get the index of the vertex under point if within epsilon tolerance' - 132 try: - 133 x, y = zip(*self._poly.xy) - 134 - 135 # display coords - 136 xt, yt = self._poly.get_transform().numerix_x_y(x, y) - 137 d = np.sqrt((xt-event.x)**2 + (yt-event.y)**2) - 138 indseq = np.nonzero(np.equal(d, np.amin(d))) - 139 ind = indseq[0] - 140 - 141 if d[ind]>=self._epsilon: - 142 ind = None - 143 - 144 return ind - 145 except: - 146 # display coords - 147 xy = np.asarray(self._poly.xy) - 148 xyt = self._poly.get_transform().transform(xy) - 149 xt, yt = xyt[:, 0], xyt[:, 1] - 150 d = np.sqrt((xt-event.x)**2 + (yt-event.y)**2) - 151 indseq = np.nonzero(np.equal(d, np.amin(d)))[0] - 152 ind = indseq[0] - 153 - 154 if d[ind]>=self._epsilon: - 155 ind = None - 156 - 157 return ind -
158 -
159 - def _button_press_callback(self, event): -
160 'whenever a mouse button is pressed' - 161 # if not self._showverts: return - 162 if event.inaxes==None: return - 163 if event.button != 1: return - 164 self._ind = self._get_ind_under_point(event) -
165 -
166 - def _button_release_callback(self, event): -
167 'whenever a mouse button is released' - 168 # if not self._showverts: return - 169 if event.button != 1: return - 170 self._ind = None -
171 -
172 - def _key_press_callback(self, event): -
173 'whenever a key is pressed' - 174 if not event.inaxes: return - 175 if event.key=='shift': return - 176 - 177 if event.key=='t': - 178 self._showbetas = not self._showbetas - 179 self._line.set_visible(self._showbetas) - 180 self._pline.set_visible(self._showbetas) - 181 self._mline.set_visible(self._showbetas) - 182 self._zline.set_visible(self._showbetas) - 183 self._sline.set_visible(self._showbetas) - 184 elif event.key=='d': - 185 ind = self._get_ind_under_point(event) - 186 if ind is not None: - 187 self._poly.xy = [tup for i,tup in enumerate(self._poly.xy) \ - 188 if i!=ind] - 189 self._line.set_data(zip(*self._poly.xy)) - 190 self.beta = [beta for i,beta in enumerate(self.beta) \ - 191 if i!=ind] - 192 elif event.key=='p': - 193 ind = self._get_ind_under_point(event) - 194 if ind is not None: - 195 self.beta[ind] = 1.0 - 196 elif event.key=='m': - 197 ind = self._get_ind_under_point(event) - 198 if ind is not None: - 199 self.beta[ind] = -1.0 - 200 elif event.key=='z': - 201 ind = self._get_ind_under_point(event) - 202 if ind is not None: - 203 self.beta[ind] = 0.0 - 204 elif event.key=='s': - 205 ind = self._get_ind_under_point(event) - 206 if ind is not None: - 207 self.gridgen_options['ul_idx'] = ind - 208 elif event.key=='i': - 209 xys = self._poly.get_transform().transform(self._poly.xy) - 210 p = event.x, event.y # display coords - 211 for i in range(len(xys)-1): - 212 s0 = xys[i] - 213 s1 = xys[i+1] - 214 d = dist_point_to_segment(p, s0, s1) - 215 if d<=self._epsilon: - 216 self._poly.xy = np.array( - 217 list(self._poly.xy[:i+1]) + - 218 [(event.xdata, event.ydata)] + - 219 list(self._poly.xy[i+1:])) - 220 self._line.set_data(zip(*self._poly.xy)) - 221 self.beta.insert(i+1, 0) - 222 break - 223 s0 = xys[-1] - 224 s1 = xys[0] - 225 d = dist_point_to_segment(p, s0, s1) - 226 if d<=self._epsilon: - 227 self._poly.xy = np.array( - 228 list(self._poly.xy) + - 229 [(event.xdata, event.ydata)]) - 230 self._line.set_data(zip(*self._poly.xy)) - 231 self.beta.append(0) - 232 elif event.key=='G' or event.key == '1': - 233 options = deepcopy(self.gridgen_options) - 234 shp = options.pop('shp') - 235 if self.proj is None: - 236 x = self.x - 237 y = self.y - 238 self.grd = Gridgen(x, y, self.beta, shp, - 239 proj=self.proj, **options) - 240 else: - 241 lon, lat = self.proj(self.x, self.y, inverse=True) - 242 self.grd = Gridgen(lon, lat, self.beta, shp, - 243 proj=self.proj, **options) - 244 self.remove_grid() - 245 self._showgrid = True - 246 gridlineprops = {'linestyle':'-', 'color':'k', 'lw':0.2} - 247 self._gridlines = [] - 248 for line in self._ax._get_lines(*(self.grd.x, self.grd.y), - 249 **gridlineprops): - 250 self._ax.add_line(line) - 251 self._gridlines.append(line) - 252 for line in self._ax._get_lines(*(self.grd.x.T, self.grd.y.T), - 253 **gridlineprops): - 254 self._ax.add_line(line) - 255 self._gridlines.append(line) - 256 elif event.key=='T' or event.key == '2': - 257 self._showgrid = not self._showgrid - 258 if hasattr(self, '_gridlines'): - 259 for line in self._gridlines: - 260 line.set_visible(self._showgrid) - 261 - 262 self._update_beta_lines() - 263 self._draw_callback(event) - 264 self._canvas.draw() -
265 -
266 - def _motion_notify_callback(self, event): -
267 'on mouse movement' - 268 # if not self._showverts: return - 269 if self._ind is None: return - 270 if event.inaxes is None: return - 271 if event.button != 1: return - 272 x,y = event.xdata, event.ydata - 273 self._poly.xy[self._ind] = x, y - 274 if self._ind == 0: - 275 self._poly.xy[-1] = x, y - 276 - 277 x, y = zip(*self._poly.xy) - 278 self._line.set_data(x[:-1], y[:-1]) - 279 self._update_beta_lines() - 280 - 281 self._canvas.restore_region(self._background) - 282 self._ax.draw_artist(self._poly) - 283 self._ax.draw_artist(self._pline) - 284 self._ax.draw_artist(self._mline) - 285 self._ax.draw_artist(self._zline) - 286 self._ax.draw_artist(self._sline) - 287 self._ax.draw_artist(self._line) - 288 self._canvas.blit(self._ax.bbox) -
289 - 290 -
291 - def __init__(self, x, y=None, beta=None, ax=None, proj=None, - 292 **gridgen_options): -
293 - 294 if isinstance(x, str): - 295 bry_dict = np.load(x) - 296 x = bry_dict['x'] - 297 y = bry_dict['y'] - 298 beta = bry_dict['beta'] - 299 - 300 assert len(x) >= 4, 'Boundary must have at least four points.' - 301 - 302 if ax is None: - 303 ax = plt.gca() - 304 - 305 self._ax = ax - 306 - 307 self.proj = proj - 308 - 309 # Set default gridgen option, and copy over specified options. - 310 self.gridgen_options = {'ul_idx': 0, 'shp': (32, 32)} - 311 - 312 for key, value in gridgen_options.iteritems(): - 313 self.gridgen_options[key] = gridgen_options[key] - 314 - 315 x = list(x); y = list(y) - 316 assert len(x)==len(y), 'arrays must be equal length' - 317 - 318 if beta is None: - 319 self.beta = [0 for xi in x] - 320 else: - 321 assert len(x)==len(beta), 'beta must have same length as x and y' - 322 self.beta = list(beta) - 323 - 324 self._line = Line2D(x, y, animated=True, - 325 ls='-', color='k', alpha=0.5, lw=1) - 326 self._ax.add_line(self._line) - 327 - 328 self._canvas = self._line.figure.canvas - 329 - 330 self._poly = Polygon(self.verts, alpha=0.1, fc='k', animated=True) - 331 self._ax.add_patch(self._poly) - 332 - 333 # Link in the lines that will show the beta values - 334 # pline for positive turns, mline for negative (minus) turns - 335 # otherwize zline (zero) for straight sections - 336 self._pline = Line2D([], [], marker='^', ms=12, mfc='g',\ - 337 animated=True, lw=0) - 338 self._mline = Line2D([], [], marker='v', ms=12, mfc='r',\ - 339 animated=True, lw=0) - 340 self._zline = Line2D([], [], marker='o', mfc='k', animated=True, lw=0) - 341 self._sline = Line2D([], [], marker='s', mfc='k', animated=True, lw=0) - 342 - 343 self._update_beta_lines() - 344 self._ax.add_artist(self._pline) - 345 self._ax.add_artist(self._mline) - 346 self._ax.add_artist(self._zline) - 347 self._ax.add_artist(self._sline) - 348 - 349 # get the canvas and connect the callback events - 350 cid = self._poly.add_callback(self._poly_changed) - 351 self._ind = None # the active vert - 352 - 353 self._canvas.mpl_connect('draw_event', self._draw_callback) - 354 self._canvas.mpl_connect('button_press_event',\ - 355 self._button_press_callback) - 356 self._canvas.mpl_connect('key_press_event', self._key_press_callback) - 357 self._canvas.mpl_connect('button_release_event',\ - 358 self._button_release_callback) - 359 self._canvas.mpl_connect('motion_notify_event',\ - 360 self._motion_notify_callback) -
361 -
362 - def save_bry(self, bry_file='bry.pickle'): -
363 f = open(bry_file, 'wb') - 364 bry_dict = {'x': self.x, 'y': self.y, 'beta': self.beta} - 365 cPickle.dump(bry_dict, f, protocol=-1) - 366 f.close() -
367 -
368 - def load_bry(self, bry_file='bry.pickle'): -
369 bry_dict = np.load(bry_file) - 370 x = bry_dict['x'] - 371 y = bry_dict['y'] - 372 self._line.set_data(x, y) - 373 self.beta = bry_dict['beta'] - 374 if hasattr(self, '_poly'): - 375 self._poly.xy = zip(x, y) - 376 self._update_beta_lines() - 377 self._draw_callback(None) - 378 self._canvas.draw() -
379 -
380 - def save_grid(self, grid_file='grid.pickle'): -
381 f = open(grid_file, 'wb') - 382 cPickle.dump(self.grd, f, protocol=-1) - 383 f.close() -
384 -
385 - def _get_verts(self): return zip(self.x, self.y) -
386 verts = property(_get_verts) -
387 - def get_xdata(self): return self._line.get_xdata() -
388 x = property(get_xdata) -
389 - def get_ydata(self): return self._line.get_ydata() -
390 y = property(get_ydata) -
391 - 392 - 393 -
394 -def _approximate_erf(x): -
395 ''' - 396 Return approximate solution to error function - 397 see http://en.wikipedia.org/wiki/Error_function - 398 ''' - 399 a = -(8*(np.pi-3.0)/(3.0*np.pi*(np.pi-4.0))) - 400 return np.sign(x) * \ - 401 np.sqrt(1.0 - np.exp( -x**2*(4.0/np.pi+a*x*x)/(1.0+a*x*x) )) -
402 - 403 -
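As a sanity check, the closed-form approximation above can be compared against scipy.special.erf (assuming SciPy is installed; the helper below simply restates the same formula):

    import numpy as np
    from scipy.special import erf

    def approximate_erf(x):
        # same closed form as _approximate_erf above
        a = -(8 * (np.pi - 3.0) / (3.0 * np.pi * (np.pi - 4.0)))
        return np.sign(x) * np.sqrt(1.0 - np.exp(-x**2 * (4.0 / np.pi + a * x * x)
                                                 / (1.0 + a * x * x)))

    x = np.linspace(-3.0, 3.0, 601)
    print(np.max(np.abs(approximate_erf(x) - erf(x))))  # small; well below 1e-3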
404 -class _Focus_x(object): -
405 """ - 406 Return a transformed, uniform grid, focused in the x-direction - 407 - 408 This class may be called with a uniform grid, with limits from [0, 1], to - 409 create a focused grid in the x-directions centered about xo. The output - 410 grid is also uniform from [0, 1] in both x and y. - 411 - 412 Parameters - 413 ---------- - 414 xo : float - 415 Location about which to focus grid - 416 factor : float - 417 amount to focus grid. Creates cell sizes that are factor smaller in - 418 the focused - 419 region. - 420 Rx : float - 421 Lateral extent of focused region, similar to a lateral spatial scale - 422 for the focusing region. - 423 - 424 Returns - 425 ------- - 426 foc : class - 427 The class may be called with arguments of a grid. The returned - 428 transformed grid (x, y) will be focused as per the parameters above. - 429 """ - 430 -
431 - def __init__(self, xo, factor=2.0, Rx=0.1): -
432 self.xo = xo - 433 self.factor = factor - 434 self.Rx = Rx -
435 -
436 - def __call__(self, x, y): -
437 x = np.asarray(x) - 438 y = np.asarray(y) - 439 assert not np.any(x>1.0) or not np.any(x<0.0) \ - 440 or not np.any(y>1.0) or not np.any(x<0.0), \ - 441 'x and y must both be within the range [0, 1].' - 442 - 443 alpha = 1.0 - 1.0/self.factor - 444 def xf(x): - 445 return x - 0.5*( np.sqrt(np.pi)*self.Rx*alpha - 446 *_approximate_erf((x-self.xo)/self.Rx) ) -
447 - 448 xf0 = xf(0.0); xf1 = xf(1.0) - 449 - 450 return (xf(x)-xf0)/(xf1-xf0), y -
451 -
452 -class _Focus_y(object): -
453 """ - 454 Return a transformed, uniform grid, focused in the y-direction - 455 - 456 This class may be called with a uniform grid, with limits from [0, 1], - 457 to create a focused grid in the y-directions centered about yo. - 458 The output grid is also uniform from [0, 1] in both x and y. - 459 - 460 Parameters - 461 ---------- - 462 yo : float - 463 Location about which to focus grid - 464 factor : float - 465 amount to focus grid. Creates cell sizes that are factor - 466 smaller in the focused region. - 467 Ry : float - 468 Lateral extent of focused region, similar to a lateral - 469 spatial scale for the focusing region. - 470 - 471 Returns - 472 ------- - 473 foc : class - 474 The class may be called with arguments of a grid. The returned - 475 transformed grid (x, y) will be focused as per the parameters above. - 476 """ - 477 -
478 - def __init__(self, yo, factor=2.0, Ry=0.1): -
479 self.yo = yo - 480 self.factor = factor - 481 self.Ry = Ry -
482 -
483 - def __call__(self, x, y): -
484 x = np.asarray(x) - 485 y = np.asarray(y) - 486 assert not np.any(x>1.0) or not np.any(x<0.0) \ - 487 or not np.any(y>1.0) or not np.any(x<0.0), \ - 488 'x and y must both be within the range [0, 1].' - 489 - 490 alpha = 1.0 - 1.0/self.factor - 491 - 492 def yf(y): - 493 return y - 0.5*( np.sqrt(np.pi)*self.Ry*alpha - 494 *_approximate_erf((y-self.yo)/self.Ry) ) -
495 - 496 yf0 = yf(0.0); yf1 = yf(1.0) - 497 - 498 return x, (yf(y)-yf0)/(yf1-yf0) -
499 -
500 -class Focus(object): -
501 """ - 502 Return a container for a sequence of Focus objects - 503 - 504 foc = Focus() - 505 - 506 The sequence is populated by using the 'add_focus_x' and 'add_focus_y' - 507 methods. These methods define a point ('xo' or 'yo'), around witch to - 508 focus, a focusing factor of 'focus', and x and y extent of focusing given - 509 by Rx or Ry. The region of focusing will be approximately Gausian, and the - 510 resolution will be increased by approximately the value of factor. - 511 - 512 Methods - 513 ------- - 514 foc.add_focus_x(xo, factor=2.0, Rx=0.1) - 515 foc.add_focus_y(yo, factor=2.0, Ry=0.1) - 516 - 517 Calls to the object return transformed coordinates: - 518 xf, yf = foc(x, y) - 519 where x and y must be within [0, 1], and are typically a uniform, - 520 normalized grid. The focused grid will be the result of applying each of - 521 the focus elements in the sequence they are added to the series. - 522 - 523 - 524 EXAMPLES - 525 -------- - 526 - 527 >>> foc = pyroms.grid.Focus() - 528 >>> foc.add_focus_x(0.2, factor=3.0, Rx=0.2) - 529 >>> foc.add_focus_y(0.6, factor=5.0, Ry=0.35) - 530 - 531 >>> x, y = np.mgrid[0:1:3j,0:1:3j] - 532 >>> xf, yf = foc(x, y) - 533 - 534 >>> print xf - 535 [[ 0. 0. 0. ] - 536 [ 0.36594617 0.36594617 0.36594617] - 537 [ 1. 1. 1. ]] - 538 >>> print yf - 539 [[ 0. 0.62479833 1. ] - 540 [ 0. 0.62479833 1. ] - 541 [ 0. 0.62479833 1. ]] - 542 """ -
543 - def __init__(self): -
544 self._focuspoints = [] -
545 -
546 - def add_focus_x(self, xo, factor=2.0, Rx=0.1): -
547 """docstring for add_point""" - 548 self._focuspoints.append(_Focus_x(xo, factor, Rx)) -
549 -
550 - def add_focus_y(self, yo, factor=2.0, Ry=0.1): -
551 """docstring for add_point""" - 552 self._focuspoints.append(_Focus_y(yo, factor, Ry)) -
553 -
554 - def __call__(self, x, y): -
555 """docstring for __call__""" - 556 for focuspoint in self._focuspoints: - 557 x, y = focuspoint(x, y) - 558 return x, y -
559 - 560 - 561 -
562 -class CGrid(object): -
563 """ - 564 Curvilinear Arakawa C-Grid - 565 - 566 The basis for the CGrid class are two arrays defining the verticies of the - 567 grid in Cartesian (for geographic coordinates, see CGrid_geo). An optional - 568 mask may be defined on the cell centers. Other Arakawa C-grid properties, - 569 such as the locations of the cell centers (rho-points), cell edges (u and - 570 v velocity points), cell widths (dx and dy) and other metrics (angle, - 571 dmde, and dndx) are all calculated internally from the vertex points. - 572 - 573 Input vertex arrays may be either type np.array or np.ma.MaskedArray. If - 574 masked arrays are used, the mask will be a combination of the specified - 575 mask (if given) and the masked locations. - 576 - 577 EXAMPLES: - 578 -------- - 579 - 580 >>> x, y = mgrid[0.0:7.0, 0.0:8.0] - 581 >>> x = np.ma.masked_where( (x<3) & (y<3), x) - 582 >>> y = np.ma.MaskedArray(y, x.mask) - 583 >>> grd = pyroms.grid.CGrid(x, y) - 584 >>> print grd.x_rho - 585 [[-- -- -- 0.5 0.5 0.5 0.5] - 586 [-- -- -- 1.5 1.5 1.5 1.5] - 587 [-- -- -- 2.5 2.5 2.5 2.5] - 588 [3.5 3.5 3.5 3.5 3.5 3.5 3.5] - 589 [4.5 4.5 4.5 4.5 4.5 4.5 4.5] - 590 [5.5 5.5 5.5 5.5 5.5 5.5 5.5]] - 591 >>> print grd.mask - 592 [[ 0. 0. 0. 1. 1. 1. 1.] - 593 [ 0. 0. 0. 1. 1. 1. 1.] - 594 [ 0. 0. 0. 1. 1. 1. 1.] - 595 [ 1. 1. 1. 1. 1. 1. 1.] - 596 [ 1. 1. 1. 1. 1. 1. 1.] - 597 [ 1. 1. 1. 1. 1. 1. 1.]] - 598 """ - 599 -
600 - def __init__(self, x_vert, y_vert, x_rho=None, y_rho=None, x_u=None, y_u=None, x_v=None, y_v=None, \ - 601 x_psi=None, y_psi=None, dx=None, dy=None, dndx=None, dmde=None, angle_rho=None): -
602 - 603 assert np.ndim(x_vert)==2 and np.ndim(y_vert)==2 and np.shape(x_vert)==np.shape(y_vert), \ - 604 'x and y must be 2D arrays of the same size.' - 605 - 606 if np.any(np.isnan(x_vert)) or np.any(np.isnan(y_vert)): - 607 x_vert = np.ma.masked_where( (isnan(x_vert)) | (isnan(y_vert)) , x_vert) - 608 y_vert = np.ma.masked_where( (isnan(x_vert)) | (isnan(y_vert)) , y_vert) - 609 - 610 self.x_vert = x_vert - 611 self.y_vert = y_vert - 612 - 613 self.f = None - 614 self.spherical = 'F' - 615 - 616 mask_shape = tuple([n-1 for n in self.x_vert.shape]) - 617 self.mask_rho = np.ones(mask_shape, dtype='d') - 618 - 619 # If maskedarray is given for verticies, modify the mask such that - 620 # non-existant grid points are masked. A cell requires all four - 621 # verticies to be defined as a water point. - 622 if isinstance(self.x_vert, np.ma.MaskedArray): - 623 mask = (self.x_vert.mask[:-1,:-1] | self.x_vert.mask[1:,:-1] | \ - 624 self.x_vert.mask[:-1,1:] | self.x_vert.mask[1:,1:]) - 625 self.mask_rho = np.asarray(~(~np.bool_(self.mask_rho) | mask), dtype='d') - 626 - 627 if isinstance(self.y_vert, np.ma.MaskedArray): - 628 mask = (self.y_vert.mask[:-1,:-1] | self.y_vert.mask[1:,:-1] | \ - 629 self.y_vert.mask[:-1,1:] | self.y_vert.mask[1:,1:]) - 630 self.mask_rho = np.asarray(~(~np.bool_(self.mask_rho) | mask), dtype='d') - 631 - 632 if x_rho is None or y_rho is None or x_u is None or y_u is None or \ - 633 x_v is None or y_v is None or x_psi is None or y_psi is None: - 634 self._calculate_subgrids() - 635 else: - 636 self.x_rho = x_rho - 637 self.y_rho = y_rho - 638 self.x_u = x_u - 639 self.y_u = y_u - 640 self.x_v = x_v - 641 self.y_v = y_v - 642 self.x_psi = x_psi - 643 self.y_psi = y_psi - 644 - 645 if dx is None or dy is None: - 646 self._calculate_metrics() - 647 else: - 648 self.dx = dx - 649 self.dy = dy - 650 - 651 self.xl = np.maximum(self.dx[0,:].sum(), self.dx[-1,:].sum()) - 652 self.el = np.maximum(self.dy[:,0].sum(), self.dy[:,-1].sum()) - 653 - 654 if dndx is None or dmde is None: - 655 self._calculate_derivative_metrics() - 656 else: - 657 self.dndx = dndx - 658 self.dmde = dmde - 659 - 660 if angle_rho is None: - 661 self._calculate_angle_rho() - 662 else: - 663 self.angle_rho = angle_rho - 664 - 665 self._calculate_angle() -
666 - 667 -
668 - def _calculate_subgrids(self): -
669 self.x_rho = 0.25*(self.x_vert[1:,1:]+self.x_vert[1:,:-1]+ \ - 670 self.x_vert[:-1,1:]+self.x_vert[:-1,:-1]) - 671 self.y_rho = 0.25*(self.y_vert[1:,1:]+self.y_vert[1:,:-1]+ \ - 672 self.y_vert[:-1,1:]+self.y_vert[:-1,:-1]) - 673 self.x_u = 0.5*(self.x_vert[:-1,1:-1] + self.x_vert[1:,1:-1]) - 674 self.y_u = 0.5*(self.y_vert[:-1,1:-1] + self.y_vert[1:,1:-1]) - 675 self.x_v = 0.5*(self.x_vert[1:-1,:-1] + self.x_vert[1:-1,1:]) - 676 self.y_v = 0.5*(self.y_vert[1:-1,:-1] + self.y_vert[1:-1,1:]) - 677 self.x_psi = self.x_vert[1:-1,1:-1] - 678 self.y_psi = self.y_vert[1:-1,1:-1] -
679 -
680 - def _calculate_metrics(self): -
681 'Calculates pm, pn, dndx, dmde from x_vert and y_vert' - 682 x_temp = 0.5*(self.x_vert[1:,:]+self.x_vert[:-1,:]) - 683 y_temp = 0.5*(self.y_vert[1:,:]+self.y_vert[:-1,:]) - 684 self.dx = np.sqrt(np.diff(x_temp, axis=1)**2 + np.diff(y_temp, axis=1)**2) - 685 x_temp = 0.5*(self.x_vert[:,1:]+self.x_vert[:,:-1]) - 686 y_temp = 0.5*(self.y_vert[:,1:]+self.y_vert[:,:-1]) - 687 self.dy = np.sqrt(np.diff(x_temp, axis=0)**2 + np.diff(y_temp, axis=0)**2) -
688 - 690 if isinstance(self.dy, np.ma.MaskedArray): - 691 self.dndx = np.ma.zeros(self.x_rho.shape, dtype='d') - 692 else: - 693 self.dndx = np.zeros(self.x_rho.shape, dtype='d') - 694 - 695 if isinstance(self.dx, np.ma.MaskedArray): - 696 self.dmde = np.ma.zeros(self.x_rho.shape, dtype='d') - 697 else: - 698 self.dmde = np.zeros(self.x_rho.shape, dtype='d') - 699 - 700 self.dndx[1:-1,1:-1] = 0.5*(self.dy[1:-1,2:] - self.dy[1:-1,:-2]) - 701 self.dmde[1:-1,1:-1] = 0.5*(self.dx[2:,1:-1] - self.dx[:-2,1:-1]) -
702 -
703 - def _calculate_angle(self): -
704 if isinstance(self.x_vert, np.ma.MaskedArray) or \ - 705 isinstance(self.y_vert, np.ma.MaskedArray): - 706 self.angle = np.ma.zeros(self.x_vert.shape, dtype='d') - 707 else: - 708 self.angle = np.zeros(self.x_vert.shape, dtype='d') - 709 - 710 angle_ud = np.arctan2(np.diff(self.y_vert, axis=1), np.diff(self.x_vert, axis=1)) - 711 angle_lr = np.arctan2(np.diff(self.y_vert, axis=0), np.diff(self.x_vert, axis=0)) - np.pi/2.0 - 712 # domain center - 713 self.angle[1:-1,1:-1] = 0.25*(angle_ud[1:-1,1:]+angle_ud[1:-1,:-1]\ - 714 +angle_lr[1:,1:-1]+angle_lr[:-1,1:-1]) - 715 # edges - 716 self.angle[0,1:-1] = (1.0/3.0)*(angle_lr[0,1:-1]+angle_ud[0,1:]+angle_ud[0,:-1]) - 717 self.angle[-1,1:-1] = (1.0/3.0)*(angle_lr[-1,1:-1]+angle_ud[-1,1:]+angle_ud[-1,:-1]) - 718 self.angle[1:-1,0] = (1.0/3.0)*(angle_ud[1:-1,0]+angle_lr[1:,0]+angle_lr[:-1,0]) - 719 self.angle[1:-1,-1] = (1.0/3.0)*(angle_ud[1:-1,-1]+angle_lr[1:,-1]+angle_lr[:-1,-1]) - 720 #conrers - 721 self.angle[0,0] = 0.5*(angle_lr[0,0]+angle_ud[0,0]) - 722 self.angle[0,-1] = 0.5*(angle_lr[0,-1]+angle_ud[0,-1]) - 723 self.angle[-1,0] = 0.5*(angle_lr[-1,0]+angle_ud[-1,0]) - 724 self.angle[-1,-1] = 0.5*(angle_lr[-1,-1]+angle_ud[-1,-1]) - 725 -
726 - def _calculate_angle_rho(self): -
727 self.angle_rho = np.arctan2(np.diff(0.5*(self.y_vert[1:,:]+self.y_vert[:-1,:])), \ - 728 np.diff(0.5*(self.x_vert[1:,:]+self.x_vert[:-1,:]))) - 729 -
730 - def calculate_orthogonality(self): -
731 ''' - 732 Calculate orthogonality error in radians - 733 ''' - 734 z = self.x_vert + 1j*self.y_vert - 735 du = np.diff(z, axis=1); du = (du/abs(du))[:-1,:] - 736 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,:-1] - 737 ang1 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 738 du = np.diff(z, axis=1); du = (du/abs(du))[1:,:] - 739 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,:-1] - 740 ang2 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 741 du = np.diff(z, axis=1); du = (du/abs(du))[:-1,:] - 742 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,1:] - 743 ang3 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 744 du = np.diff(z, axis=1); du = (du/abs(du))[1:,:] - 745 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,1:] - 746 ang4 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 747 ang = np.mean([abs(ang1), abs(ang2), abs(ang3), abs(ang4)], axis=0) - 748 ang = (ang-np.pi/2.0) - 749 return ang -
750 -
751 - def mask_polygon(self, polyverts, mask_value=0.0): -
752 """ - 753 Mask Cartesian points contained within the polygon defined by polyverts - 754 - 755 A cell is masked if the cell center (x_rho, y_rho) is within the - 756 polygon. Other sub-masks (mask_u, mask_v, and mask_psi) are updated - 757 automatically. - 758 - 759 mask_value [=0.0] may be specified to alter the value of the mask set - 760 within the polygon. E.g., mask_value=1 for water points. - 761 """ - 762 - 763 polyverts = np.asarray(polyverts) - 764 assert polyverts.ndim == 2, \ - 765 'polyverts must be a 2D array, or a similar sequence' - 766 assert polyverts.shape[1] == 2, \ - 767 'polyverts must be two columns of points' - 768 assert polyverts.shape[0] > 2, \ - 769 'polyverts must contain at least 3 points' - 770 - 771 mask = self.mask_rho - 772 inside = points_inside_poly( - 773 np.vstack( (self.x_rho.flatten(), self.y_rho.flatten()) ).T, - 774 polyverts) - 775 if np.any(inside): - 776 self.mask_rho.flat[inside] = mask_value -
777 -
778 - def _get_mask_u(self): -
779 return self.mask_rho[:,1:]*self.mask_rho[:,:-1] -
780 -
781 - def _get_mask_v(self): -
782 return self.mask_rho[1:,:]*self.mask_rho[:-1,:] -
783 -
784 - def _get_mask_psi(self): -
785 return self.mask_rho[1:,1:]*self.mask_rho[:-1,1:]* \ - 786 self.mask_rho[1:,:-1]*self.mask_rho[:-1,:-1] -
787 -
788 - def _set_mask_rho(self, mask_rho): -
789 self.mask_rho = mask_rho -
790 - 791 x = property(lambda self: self.x_vert, None, None, 'Return x_vert') - 792 y = property(lambda self: self.y_vert, None, None, 'Return x_vert') - 793 mask = property(lambda self: self.mask_rho, _set_mask_rho, None, 'Return mask_rho') - 794 mask_u = property(_get_mask_u, None, None, 'Return mask_u') - 795 mask_v = property(_get_mask_v, None, None, 'Return mask_v') - 796 mask_psi = property(_get_mask_psi, None, None, 'Return mask_psi') - 797 - 798 -
799 -class CGrid_geo(CGrid): -
800 """ - 801 Curvilinear Arakawa C-grid defined in geographic coordinates - 802 - 803 For a geographic grid, a projection may be specified, or The default - 804 projection for will be defined by the matplotlib.toolkits.Basemap - 805 projection: - 806 - 807 proj = Basemap(projection='merc', resolution=None, lat_ts=0.0) - 808 - 809 For a geographic grid, the cell widths are determined by the great - 810 circle distances. Angles, however, are defined using the projected - 811 coordinates, so a projection that conserves angles must be used. This - 812 means typically either Mercator (projection='merc') or Lambert - 813 Conformal Conic (projection='lcc'). - 814 """ -
815 - def _calculate_metrics(self): -
816 # calculate metrics based on x and y grid - 817 super(CGrid_geo, self)._calculate_metrics() - 818 - 819 # optionally calculate dx and dy based on great circle distances - 820 # for more accurate cell sizes. - 821 if self.use_gcdist: - 822 geod = pyproj.Geod(ellps=self.ellipse) - 823 az_forward, az_back, dx = geod.inv(self.lon[:,1:], self.lat[:,1:], \ - 824 self.lon[:,:-1], self.lat[:,:-1]) - 825 self.dx = 0.5*(dx[1:,:]+dx[:-1,:]) - 826 self.pm = 1.0/self.dx - 827 az_forward, az_back, dy = geod.inv(self.lon[1:,:], self.lat[1:,:], \ - 828 self.lon[:-1,:], self.lat[:-1,:]) - 829 self.dy = 0.5*(dy[:,1:]+dy[:,:-1]) - 830 self.pn = 1.0/self.dy -
831 - 832 -
834 if isinstance(self.dy, np.ma.MaskedArray): - 835 self.dndx = np.ma.zeros(self.dy.shape, dtype='d') - 836 else: - 837 self.dndx = np.zeros(self.dy.shape, dtype='d') - 838 - 839 if isinstance(self.dx, np.ma.MaskedArray): - 840 self.dmde = np.ma.zeros(self.dx.shape, dtype='d') - 841 else: - 842 self.dmde = np.zeros(self.dx.shape, dtype='d') - 843 - 844 self.dndx[1:-1,1:-1] = 0.5*(self.dy[1:-1,2:] - self.dy[1:-1,:-2]) - 845 self.dmde[1:-1,1:-1] = 0.5*(self.dx[2:,1:-1] - self.dx[:-2,1:-1]) -
846 -
847 - def _calculate_angle_rho(self): -
848 if isinstance(self.lon, np.ma.MaskedArray) or \ - 849 isinstance(self.lat, np.ma.MaskedArray): - 850 self.angle_rho = np.ma.zeros(self.lon.shape, dtype='d') - 851 else: - 852 self.angle_rho = np.zeros(self.lon.shape, dtype='d') - 853 - 854 # calculate metrics based on x and y grid - 855 super(CGrid_geo, self)._calculate_angle_rho() - 856 - 857 # optionally calculate dx and dy based on great circle distances - 858 # for more accurate cell sizes. - 859 if self.use_gcdist: - 860 geod = pyproj.Geod(ellps=self.ellipse) - 861 az_forward, az_back, dx = geod.inv(self.lon[:,:-1], self.lat[:,:-1], \ - 862 self.lon[:,1:], self.lat[:,1:]) - 863 - 864 angle = 0.5 * (az_forward[1:,:] + az_forward[:-1,:]) - 865 self.angle_rho = (90 - angle) * np.pi/180. -
866 - 867 -
868 - def __init__(self, lon_vert, lat_vert, proj, use_gcdist=True, ellipse='WGS84', \ - 869 lon_rho=None, lat_rho=None, lon_u=None, lat_u=None, \ - 870 lon_v=None, lat_v=None, lon_psi=None, lat_psi=None, dx=None, dy=None, \ - 871 dndx=None, dmde=None, angle_rho=None): -
872 - 873 x, y = proj(lon_vert, lat_vert) - 874 self.lon_vert = lon_vert - 875 self.lat_vert = lat_vert - 876 self.proj = proj - 877 - 878 self.use_gcdist = use_gcdist - 879 self.ellipse = ellipse - 880 - 881 if lon_rho is None or lat_rho is None or lon_u is None or lat_u is None or \ - 882 lon_v is None or lat_v is None or lon_psi is None or lat_psi is None: - 883 - 884 super(CGrid_geo, self).__init__(x, y) - 885 - 886 self.lon_rho, self.lat_rho = self.proj(self.x_rho, self.y_rho, - 887 inverse=True) - 888 self.lon_u, self.lat_u = self.proj(self.x_u, self.y_u, inverse=True) - 889 self.lon_v, self.lat_v = self.proj(self.x_v, self.y_v, inverse=True) - 890 self.lon_psi, self.lat_psi = self.proj(self.x_psi, self.y_psi, - 891 inverse=True) - 892 else: - 893 self.lon_rho = lon_rho - 894 self.lat_rho = lat_rho - 895 self.lon_u = lon_u - 896 self.lat_u = lat_u - 897 self.lon_v = lon_v - 898 self.lat_v = lat_v - 899 self.lon_psi = lon_psi - 900 self.lat_psi = lat_psi - 901 #calculate cartesian position - 902 self.x_vert, self.y_vert = proj(lon_vert, lat_vert) - 903 self.x_rho, self.y_rho = proj(lon_rho, lat_rho) - 904 self.x_u, self.y_u = proj(lon_u, lat_u) - 905 self.x_v, self.y_v = proj(lon_v, lat_v) - 906 self.x_psi, self.y_psi = proj(lon_psi, lat_psi) - 907 - 908 if dx is None or dy is None: - 909 self._calculate_metrics() - 910 else: - 911 self.dx = dx - 912 self.dy = dy - 913 - 914 self.xl = np.maximum(self.dx[0,:].sum(), self.dx[-1,:].sum()) - 915 self.el = np.maximum(self.dy[:,0].sum(), self.dy[:,-1].sum()) - 916 - 917 if dndx is None or dmde is None: - 918 self._calculate_derivative_metrics() - 919 else: - 920 self.dndx = dndx - 921 self.dmde = dmde - 922 - 923 if angle_rho is None: - 924 self._calculate_angle_rho() - 925 else: - 926 self.angle_rho = angle_rho - 927 - 928 self.f = 2.0 * 7.29e-5 * np.sin(self.lat_rho * np.pi / 180.0) - 929 self.spherical = 'T' -
930 - 931 -
932 - def mask_polygon_geo(lonlat_verts, mask_value=0.0): -
933 lon, lat = zip(*lonlat_verts) - 934 x, y = proj(lon, lat, inverse=True) - 935 self.mask_polygon(zip(x, y), mask_value) -
936 - 937 lon = property(lambda self: self.lon_vert, None, None, 'Shorthand for lon_vert') - 938 lat = property(lambda self: self.lat_vert, None, None, 'Shorthand for lat_vert') -
939 - 940 - 941 -
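A minimal construction sketch for the CGrid_geo class above; the projection corners and the vertex values are made up for illustration, and Basemap is used only because the docstring quotes it as the default:

    import numpy as np
    from mpl_toolkits.basemap import Basemap
    import pyroms

    # Mercator preserves angles, as the CGrid_geo docstring requires
    proj = Basemap(projection='merc', llcrnrlon=-73.0, llcrnrlat=39.0,
                   urcrnrlon=-68.0, urcrnrlat=43.0, lat_ts=0.0, resolution=None)

    # a tiny 3x4 set of cell vertices (illustrative values only)
    lon_vert, lat_vert = np.meshgrid(np.linspace(-72.0, -69.0, 4),
                                     np.linspace(40.0, 42.0, 3))
    grd = pyroms.hgrid.CGrid_geo(lon_vert, lat_vert, proj)
    print(grd.dx.shape, grd.spherical)  # great-circle cell widths at rho points; 'T'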
942 -class Gridgen(CGrid): -
943 """ - 944 docstring for Gridgen - 945 """ - 946 - 947 -
948 - def generate_grid(self): -
949 - 950 if self._gn is not None: - 951 self._libgridgen.gridnodes_destroy(self._gn) - 952 - 953 nbry = len(self.xbry) - 954 - 955 nsigmas = ctypes.c_int(0) - 956 sigmas = ctypes.c_void_p(0) - 957 nrect = ctypes.c_int(0) - 958 xrect = ctypes.c_void_p(0) - 959 yrect = ctypes.c_void_p(0) - 960 - 961 if self.focus is None: - 962 ngrid = ctypes.c_int(0) - 963 xgrid = ctypes.POINTER(ctypes.c_double)() - 964 ygrid = ctypes.POINTER(ctypes.c_double)() - 965 else: - 966 y, x = np.mgrid[0:1:self.ny*1j, 0:1:self.nx*1j] - 967 xgrid, ygrid = self.focus(x, y) - 968 ngrid = ctypes.c_int(xgrid.size) - 969 xgrid = (ctypes.c_double * xgrid.size)(*xgrid.flatten()) - 970 ygrid = (ctypes.c_double * ygrid.size)(*ygrid.flatten()) - 971 - 972 self._gn = self._libgridgen.gridgen_generategrid2( - 973 ctypes.c_int(nbry), - 974 (ctypes.c_double * nbry)(*self.xbry), - 975 (ctypes.c_double * nbry)(*self.ybry), - 976 (ctypes.c_double * nbry)(*self.beta), - 977 ctypes.c_int(self.ul_idx), - 978 ctypes.c_int(self.nx), - 979 ctypes.c_int(self.ny), - 980 ngrid, - 981 xgrid, - 982 ygrid, - 983 ctypes.c_int(self.nnodes), - 984 ctypes.c_int(self.newton), - 985 ctypes.c_double(self.precision), - 986 ctypes.c_int(self.checksimplepoly), - 987 ctypes.c_int(self.thin), - 988 ctypes.c_int(self.nppe), - 989 ctypes.c_int(self.verbose), - 990 ctypes.byref(nsigmas), - 991 ctypes.byref(sigmas), - 992 ctypes.byref(nrect), - 993 ctypes.byref(xrect), - 994 ctypes.byref(yrect) ) - 995 - 996 x = self._libgridgen.gridnodes_getx(self._gn) - 997 x = np.asarray([x[0][i] for i in range(self.ny*self.nx)]) - 998 # x = np.asarray([x[j][i] for j in range(self.ny) for i in range(self.nx)]) - 999 x.shape = (self.ny, self.nx) -1000 -1001 y = self._libgridgen.gridnodes_gety(self._gn) -1002 y = np.asarray([y[0][i] for i in range(self.ny*self.nx)]) -1003 # y = np.asarray([y[j][i] for j in range(self.ny) for i in range(self.nx)]) -1004 y.shape = (self.ny, self.nx) -1005 -1006 if np.any(np.isnan(x)) or np.any(np.isnan(y)): -1007 x = np.ma.masked_where(np.isnan(x), x) -1008 y = np.ma.masked_where(np.isnan(y), y) -1009 -1010 # if self.proj is not None: -1011 # lon, lat = self.proj(x, y, inverse=True) -1012 # super(Gridgen, self).__init__(lon, lat, proj=self.proj) -1013 # else: -1014 super(Gridgen, self).__init__(x, y) -
1015 -1016 -1017 -
1018 - def __init__(self, xbry, ybry, beta, shape, ul_idx=0, \ -1019 focus=None, proj=None, \ -1020 nnodes=14, precision=1.0e-12, nppe=3, \ -1021 newton=True, thin=True, checksimplepoly=True, verbose=False): -
1022 -1023 #self._libgridgen = np.ctypeslib.load_library('libgridgen',__file__) -1024 self._libgridgen = np.ctypeslib.load_library('libgridgen', pyroms.__path__[0]) -1025 -1026 # In MacOSX, use of c_void_p does not return proper structure. -1027 # (An integer address is returned and subsequent use results in a -1028 # Segmentation Fault) -1029 # All structures have to be declared. -1030 -1031 # NODETYPE is enum(erated) -1032 (NT_NONE, NT_DD, NT_CEN, NT_COR) = (0, 1, 2, 3) -1033 -1034 class GRIDSTATS(ctypes.Structure): -1035 _fields_ = [ -1036 ("mdo", ctypes.c_double), -1037 ("imdo", ctypes.c_int), -1038 ("jmdo", ctypes.c_int), -1039 ("ado", ctypes.c_double), -1040 ("mar", ctypes.c_double), -1041 ("aar", ctypes.c_double) -1042 ] -
1043 -1044 class GRIDNODES(ctypes.Structure): -1045 _fields_ = [ -1046 ("nx", ctypes.c_int), -1047 ("ny", ctypes.c_int), -1048 ("gx", ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), -1049 ("gy", ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), -1050 ("type", ctypes.c_int), -1051 ("validated", ctypes.c_int), -1052 ("stats", ctypes.POINTER(GRIDSTATS)), -1053 ("nextpoint", ctypes.c_int) -1054 ] -
1055 -1056 class EXTENT(ctypes.Structure): -1057 _fields_ = [ -1058 ("xmin", ctypes.c_double), -1059 ("xmax", ctypes.c_double), -1060 ("ymin", ctypes.c_double), -1061 ("ymax", ctypes.c_double) -1062 ] -1063 -1064 class POLY(ctypes.Structure): -1065 _fields_ = [ -1066 ("n", ctypes.c_int), -1067 ("nallocated", ctypes.c_int), -1068 ("e", EXTENT), -1069 ("x", ctypes.POINTER(ctypes.c_double)), -1070 ("y", ctypes.POINTER(ctypes.c_double)) -1071 ] -1072 -1073 # SUBGRID -1074 # A forward declaration of this structure is used -1075 # (1) Defined it first with pass -1076 # (2) Define the fields next -1077 -1078 # SUBGRID (1) -1079 class SUBGRID(ctypes.Structure): -1080 pass -1081 -1082 class GRIDMAP(ctypes.Structure): -1083 _fields_ = [ -1084 ("bound", ctypes.POINTER(POLY)), -1085 ("trunk", ctypes.POINTER(SUBGRID)), -1086 ("nleaves", ctypes.c_int), -1087 ("nce1", ctypes.c_int), -1088 ("nce2", ctypes.c_int), -1089 ("gx", ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), -1090 ("gy", ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), -1091 ("sign", ctypes.c_int) -1092 ] -1093 -1094 # SUBGRID (2) -1095 SUBGRID._fields_ = [ -1096 ("gmap", ctypes.POINTER(GRIDMAP)), -1097 ("bound", ctypes.POINTER(POLY)), -1098 ("mini", ctypes.c_int), -1099 ("maxi", ctypes.c_int), -1100 ("minj", ctypes.c_int), -1101 ("maxj", ctypes.c_int), -1102 ("half1", ctypes.POINTER(SUBGRID)), -1103 ("half2", ctypes.POINTER(SUBGRID)) -1104 ] -1105 -1106 #self._libgridgen.gridgen_generategrid2.restype = ctypes.c_void_p -1107 self._libgridgen.gridgen_generategrid2.restype = ctypes.POINTER(GRIDNODES) -1108 self._libgridgen.gridnodes_getx.restype = ctypes.POINTER(ctypes.POINTER(ctypes.c_double)) -1109 self._libgridgen.gridnodes_gety.restype = ctypes.POINTER(ctypes.POINTER(ctypes.c_double)) -1110 self._libgridgen.gridnodes_getnce1.restype = ctypes.c_int -1111 self._libgridgen.gridnodes_getnce2.restype = ctypes.c_int -1112 #self._libgridgen.gridmap_build.restype = ctypes.c_void_p -1113 self._libgridgen.gridmap_build.restype = ctypes.POINTER(GRIDMAP) -1114 -1115 self.xbry = np.asarray(xbry, dtype='d') -1116 self.ybry = np.asarray(ybry, dtype='d') -1117 self.beta = np.asarray(beta, dtype='d') -1118 assert self.beta.sum() == 4.0, 'sum of beta must be 4.0' -1119 self.shape = shape -1120 self.ny = shape[0] -1121 self.nx = shape[1] -1122 self.ul_idx = ul_idx -1123 self.focus = focus -1124 self.nnodes = nnodes -1125 self.precision = precision -1126 self.nppe = nppe -1127 self.newton = newton -1128 self.thin = thin -1129 self.checksimplepoly = checksimplepoly -1130 self.verbose = verbose -1131 -1132 self.proj = proj -1133 if self.proj is not None: -1134 self.xbry, self.ybry = proj(self.xbry, self.ybry) -1135 -1136 self._gn = None -1137 self.generate_grid() -1138 -
1139 - def __del__(self): -
1140 """delete gridnode object upon deletion""" -1141 self._libgridgen.gridnodes_destroy(self._gn) -
1142 -1143 -
1144 -def rho_to_vert(xr, yr, pm, pn, ang): -
1145 Mp, Lp = xr.shape -1146 x = np.empty((Mp+1, Lp+1), dtype='d') -1147 y = np.empty((Mp+1, Lp+1), dtype='d') -1148 x[1:-1, 1:-1] = 0.25*(xr[1:,1:]+xr[1:,:-1]+xr[:-1,1:]+xr[:-1,:-1]) -1149 y[1:-1, 1:-1] = 0.25*(yr[1:,1:]+yr[1:,:-1]+yr[:-1,1:]+yr[:-1,:-1]) -1150 -1151 # east side -1152 theta = 0.5*(ang[:-1,-1]+ang[1:,-1]) -1153 dx = 0.5*(1.0/pm[:-1,-1]+1.0/pm[1:,-1]) -1154 dy = 0.5*(1.0/pn[:-1,-1]+1.0/pn[1:,-1]) -1155 x[1:-1,-1] = x[1:-1,-2] + dx*np.cos(theta) -1156 y[1:-1,-1] = y[1:-1,-2] + dx*np.sin(theta) -1157 -1158 # west side -1159 theta = 0.5*(ang[:-1,0]+ang[1:,0]) -1160 dx = 0.5*(1.0/pm[:-1,0]+1.0/pm[1:,0]) -1161 dy = 0.5*(1.0/pn[:-1,0]+1.0/pn[1:,0]) -1162 x[1:-1,0] = x[1:-1,1] - dx*np.cos(theta) -1163 y[1:-1,0] = y[1:-1,1] - dx*np.sin(theta) -1164 -1165 # north side -1166 theta = 0.5*(ang[-1,:-1]+ang[-1,1:]) -1167 dx = 0.5*(1.0/pm[-1,:-1]+1.0/pm[-1,1:]) -1168 dy = 0.5*(1.0/pn[-1,:-1]+1.0/pn[-1,1:]) -1169 x[-1,1:-1] = x[-2,1:-1] - dy*np.sin(theta) -1170 y[-1,1:-1] = y[-2,1:-1] + dy*np.cos(theta) -1171 -1172 # here we are now going to the south side.. -1173 theta = 0.5*(ang[0,:-1]+ang[0,1:]) -1174 dx = 0.5*(1.0/pm[0,:-1]+1.0/pm[0,1:]) -1175 dy = 0.5*(1.0/pn[0,:-1]+1.0/pn[0,1:]) -1176 x[0,1:-1] = x[1,1:-1] + dy*np.sin(theta) -1177 y[0,1:-1] = y[1,1:-1] - dy*np.cos(theta) -1178 -1179 #Corners -1180 x[0,0] = 4.0*xr[0,0]-x[1,0]-x[0,1]-x[1,1] -1181 x[-1,0] = 4.0*xr[-1,0]-x[-2,0]-x[-1,1]-x[-2,1] -1182 x[0,-1] = 4.0*xr[0,-1]-x[0,-2]-x[1,-1]-x[1,-2] -1183 x[-1,-1] = 4.0*xr[-1,-1]-x[-2,-2]-x[-2,-1]-x[-1,-2] -1184 -1185 y[0,0] = 4.0*yr[0,0]-y[1,0]-y[0,1]-y[1,1] -1186 y[-1,0] = 4.0*yr[-1,0]-y[-2,0]-y[-1,1]-y[-2,1] -1187 y[0,-1] = 4.0*yr[0,-1]-y[0,-2]-y[1,-1]-y[1,-2] -1188 y[-1,-1] = 4.0*yr[-1,-1]-y[-2,-2]-y[-2,-1]-y[-1,-2] -1189 -1190 return x, y -
1191 -1192 -
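A small usage sketch for rho_to_vert above, on an idealized uniform grid (the pm, pn and ang values are illustrative):

    import numpy as np
    from pyroms.hgrid import rho_to_vert

    # uniform, unit-spaced rho points: pm = pn = 1/dx = 1 and zero rotation
    yr, xr = np.mgrid[0.5:4.5, 0.5:6.5]
    pm = np.ones_like(xr)
    pn = np.ones_like(xr)
    ang = np.zeros_like(xr)
    xv, yv = rho_to_vert(xr, yr, pm, pn, ang)
    print(xv.shape, yv.shape)  # one larger in each dimension: (5, 7) (5, 7)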
1193 -def rho_to_vert_geo(lonr, latr, lonp, latp): -
1194 Mm, Lm = lonr.shape -1195 lon = np.zeros((Mm+1,Lm+1)) -1196 lat = np.zeros((Mm+1,Lm+1)) -1197 -1198 lon[1:-1, 1:-1] = lonp[:,:] -1199 lat[1:-1, 1:-1] = latp[:,:] -1200 -1201 #North edge -1202 lon[Mm,0:-2] = lonr[Mm-1,0:-1] - ( lonp[Mm-2,:] - lonr[Mm-1,0:-1] ) -1203 lon[Mm,-2:] = lonr[Mm-1,-2:] - ( lonp[Mm-2,-2:] - lonr[Mm-1,-2:] ) -1204 lat[Mm,0:-2] = latr[Mm-1,0:-1] - ( latp[Mm-2,:] - latr[Mm-1,0:-1] ) -1205 lat[Mm,-2:] = latr[Mm-1,-2:] - ( latp[Mm-2,-2:] - latr[Mm-1,-2:] ) -1206 -1207 #South edge -1208 lon[0,0:-2] = lonr[0,0:-1] - ( lonp[0,:] - lonr[0,0:-1] ) -1209 lon[0,-2:] = lonr[0,-2:] - ( lonp[0,-2:] - lonr[0,-2:] ) -1210 lat[0,0:-2] = latr[0,0:-1] - ( latp[0,:] - latr[0,0:-1] ) -1211 lat[0,-2:] = latr[0,-2:] - ( latp[0,-2:] - latr[0,-2:] ) -1212 -1213 #East edge -1214 lon[0:-2,Lm] = lonr[0:-1,Lm-1] - ( lonp[:,Lm-2] - lonr[0:-1,Lm-1] ) -1215 lon[-2:,Lm] = lonr[-2:,Lm-1] - ( lonp[-2:,Lm-2] - lonr[-2:,Lm-1] ) -1216 lat[0:-2,Lm] = latr[0:-1,Lm-1] - ( latp[:,Lm-2] - latr[0:-1,Lm-1] ) -1217 lat[-2:,Lm] = latr[-2:,Lm-1] - ( latp[-2:,Lm-2] - latr[-2:,Lm-1] ) -1218 -1219 #West edge -1220 lon[0:-2,0] = lonr[0:-1,0] - ( lonp[:,0] - lonr[0:-1,0] ) -1221 lon[-2:,0] = lonr[-2:,0] - ( lonp[-2:,0] - lonr[-2:,0] ) -1222 lat[0:-2,0] = latr[0:-1,0] - ( latp[:,0] - latr[0:-1,0] ) -1223 lat[-2:,0] = latr[-2:,0] - ( latp[-2:,0] - latr[-2:,0] ) -1224 -1225 return lon, lat -
1226 -1227 -
1228 -class edit_mask_mesh(object): -
1229 """ -1230 Interactive mask editor -1231 -1232 edit_mask_mesh(grd, proj) -1233 -1234 Edit grd mask. Mask/Unsmask cell by a simple click on the cell. -1235 Mask modification are store in mask_change.txt for further use. -1236 -1237 Key commands: -1238 e : toggle between Editing/Viewing mode -1239 """ -1240 -
1241 - def _on_key(self, event): -
1242 if event.key == 'e': -1243 self._clicking = not self._clicking -1244 plt.title('Editing %s -- click "e" to toggle' % self._clicking) -1245 plt.draw() -
1246 -
1247 - def _on_click(self, event): -
1248 x, y = event.xdata, event.ydata -1249 if event.button==1 and event.inaxes is not None and self._clicking == True: -1250 d = (x-self._xc)**2 + (y-self._yc)**2 -1251 if isinstance(self.xv, np.ma.MaskedArray): -1252 idx = np.argwhere(d[~self._xc.mask] == d.min()) -1253 else: -1254 idx = np.argwhere(d.flatten() == d.min()) -1255 self._mask[idx] = float(not self._mask[idx]) -1256 i, j = np.argwhere(d == d.min())[0] -1257 self.mask[i, j] = float(not self.mask[i, j]) -1258 #open output file -1259 f = open('mask_change.txt','a') -1260 value = (i, j, self.mask[i, j]) -1261 s = str(value) -1262 f.write(s + '\n') -1263 #close file -1264 f.close() -1265 self._pc.set_array(self._mask) -1266 self._pc.changed() -1267 plt.draw() -
1268 -
1269 - def __init__(self, grd, proj=None, **kwargs): -
1270 -1271 if type(grd).__name__ == 'ROMS_Grid': -1272 try: -1273 xv = grd.hgrid.lon_vert -1274 yv = grd.hgrid.lat_vert -1275 mask = grd.hgrid.mask_rho -1276 except: -1277 xv = grd.hgrid.x_vert -1278 yv = grd.hgrid.y_vert -1279 mask = grd.hgrid.mask_rho -1280 -1281 if type(grd).__name__ == 'CGrid_geo': -1282 try: -1283 xv = grd.lon_vert -1284 yv = grd.lat_vert -1285 mask = grd.mask_rho -1286 except: -1287 xv = grd.x_vert -1288 yv = grd.y_vert -1289 mask = grd.mask_rho -1290 -1291 assert xv.shape == yv.shape, 'xv and yv must have the same shape' -1292 for dx, dq in zip(xv.shape, mask.shape): -1293 assert dx==dq+1, \ -1294 '''xv and yv must be cell verticies -1295 (i.e., one cell bigger in each dimension)''' -1296 -1297 self.xv = xv -1298 self.yv = yv -1299 -1300 self.mask = mask -1301 -1302 self.proj = proj -1303 -1304 land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) -1305 sea_color = kwargs.pop('sea_color', (0.6, 0.6, 1.0)) -1306 -1307 cm = plt.matplotlib.colors.ListedColormap([land_color, sea_color], -1308 name='land/sea') -1309 -1310 if self.proj is None: -1311 self._pc = plt.pcolor(xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1312 else: -1313 xv, yv = self.proj(xv, yv) -1314 self._pc = Basemap.pcolor(self.proj, xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1315 self.proj.drawcoastlines() -1316 -1317 self._xc = 0.25*(xv[1:,1:]+xv[1:,:-1]+xv[:-1,1:]+xv[:-1,:-1]) -1318 self._yc = 0.25*(yv[1:,1:]+yv[1:,:-1]+yv[:-1,1:]+yv[:-1,:-1]) -1319 -1320 if isinstance(self.xv, np.ma.MaskedArray): -1321 self._mask = mask[~self._xc.mask] -1322 else: -1323 self._mask = mask.flatten() -1324 -1325 plt.connect('button_press_event', self._on_click) -1326 plt.connect('key_press_event', self._on_key) -1327 self._clicking = False -1328 plt.title('Editing %s -- click "e" to toggle' % self._clicking) -1329 plt.draw() -
1330 -1331 -
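The edit_mask_mesh editor above records each toggled cell in mask_change.txt, one "(i, j, value)" tuple per line (see _on_click). A small sketch, with a made-up helper name, for replaying those edits onto a mask array later:

    import ast
    import numpy as np

    def apply_mask_changes(mask, filename='mask_change.txt'):
        # replay the edits written by edit_mask_mesh._on_click above
        mask = np.array(mask, dtype='d')
        with open(filename) as f:
            for line in f:
                line = line.strip()
                if line:
                    i, j, value = ast.literal_eval(line)
                    mask[int(i), int(j)] = value
        return mask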
1332 -def uvp_masks(rmask): -
1333 ''' -1334 return u-, v-, and psi-masks based on input rho-mask -1335 -1336 Parameters -1337 ---------- -1338 -1339 rmask : ndarray -1340 mask at CGrid rho-points -1341 -1342 Returns -1343 ------- -1344 (umask, vmask, pmask) : ndarrays -1345 masks at u-, v-, and psi-points -1346 ''' -1347 rmask = np.asarray(rmask) -1348 assert rmask.ndim == 2, 'rmask must be a 2D array' -1349 assert np.all((rmask==0)|(rmask==1)), 'rmask array must contain only ones and zeros.' -1350 -1351 umask = rmask[:, :-1] * rmask[:, 1:] -1352 vmask = rmask[:-1, :] * rmask[1:, :] -1353 pmask = rmask[:-1, :-1] * rmask[:-1, 1:] * rmask[1:, :-1] * rmask[1:, 1:] -1354 -1355 return umask, vmask, pmask -
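A short usage sketch for uvp_masks above (the mask values are illustrative):

    import numpy as np
    from pyroms.hgrid import uvp_masks

    rmask = np.ones((4, 5))
    rmask[0, :2] = 0.0                # mark two rho cells as land
    umask, vmask, pmask = uvp_masks(rmask)
    print(umask.shape, vmask.shape, pmask.shape)  # (4, 4) (3, 5) (3, 4)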
1356 -1357 -1358 -1359 if __name__ == '__main__': -1360 geographic = False -1361 if geographic: -1362 from mpl_toolkits.basemap import Basemap -1363 proj = Basemap(projection='lcc', -1364 resolution='i', -1365 llcrnrlon=-72.0, -1366 llcrnrlat= 40.0, -1367 urcrnrlon=-63.0, -1368 urcrnrlat=47.0, -1369 lat_0=43.0, -1370 lon_0=-62.5) -1371 -1372 lon = (-71.977385177601761, -70.19173825913137, -1373 -63.045075098584945,-64.70104074097425) -1374 lat = (42.88215610827428, 41.056141745853786, -1375 44.456701607935841, 46.271758064353897) -1376 beta = [1.0, 1.0, 1.0, 1.0] -1377 -1378 grd = Gridgen(lon, lat, beta, (32, 32), proj=proj) -1379 -1380 for seg in proj.coastsegs: -1381 grd.mask_polygon(seg) -1382 -1383 plt.pcolor(grd.x, grd.y, grd.mask) -1384 plt.show() -1385 else: -1386 x = [0.2, 0.85, 0.9, 0.82, 0.23] -1387 y = [0.2, 0.25, 0.5, 0.82, .83] -1388 beta = [1.0, 1.0, 0.0, 1.0, 1.0] -1389 -1390 grd = Gridgen(x, y, beta, (32, 32)) -1391 -1392 ax = plt.subplot(111) -1393 BoundaryInteractor(x, y, beta) -1394 plt.show() -1395 -1396 -1397 -
1398 -class get_position_from_map(object): -
1399 """ -1400 Get cell index position Interactively -1401 -1402 get_position_from_map(grd, proj) -1403 -1404 Get index i, j as well as lon, lat coordinates for one cell -1405 simply by clicking on the cell. -1406 -1407 Key commands: -1408 i : toggle between Interactive/Viewing mode -1409 """ -
1410 - def _on_key(self, event): -
1411 if event.key == 'i': -1412 self._clicking = not self._clicking -1413 plt.title('Interactive %s -- click "i" to toggle' % self._clicking) -1414 plt.draw() -
1415 -
1416 - def _on_click(self, event): -
1417 x, y = event.xdata, event.ydata -1418 if event.button==1 and event.inaxes is not None and self._clicking == True: -1419 d = (x-self._xc)**2 + (y-self._yc)**2 -1420 if isinstance(self.xv, np.ma.MaskedArray): -1421 idx = np.argwhere(d[~self._xc.mask] == d.min()) -1422 else: -1423 idx = np.argwhere(d.flatten() == d.min()) -1424 j, i = np.argwhere(d == d.min())[0] -1425 print 'Position on the grid (rho point): i =', i, ', j =', j -1426 if self.proj is not None: -1427 lon, lat = self.proj(self._xc[j,i], self._yc[j,i], inverse=True) -1428 print 'corresponding geographical position : lon = ', lon, ', lat =', lat -1429 else: -1430 print 'corresponding cartesian position : x = ', self._xc[j,i], ', y =', self._yc[j,i] -
1431 -
1432 - def __init__(self, grd, proj=None, **kwargs): -
1433 -1434 try: -1435 xv = grd.hgrid.lon_vert -1436 yv = grd.hgrid.lat_vert -1437 mask = grd.hgrid.mask_rho -1438 except: -1439 xv = grd.hgrid.x_vert -1440 yv = grd.hgrid.y_vert -1441 mask = grd.hgrid.mask_rho -1442 -1443 assert xv.shape == yv.shape, 'xv and yv must have the same shape' -1444 for dx, dq in zip(xv.shape, mask.shape): -1445 assert dx==dq+1, \ -1446 '''xv and yv must be cell verticies -1447 (i.e., one cell bigger in each dimension)''' -1448 -1449 self.xv = xv -1450 self.yv = yv -1451 -1452 self.mask = mask -1453 -1454 self.proj = proj -1455 -1456 land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) -1457 sea_color = kwargs.pop('sea_color', (0.6, 0.6, 1.0)) -1458 -1459 cm = plt.matplotlib.colors.ListedColormap([land_color, sea_color], -1460 name='land/sea') -1461 -1462 if self.proj is None: -1463 self._pc = plt.pcolor(xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1464 -1465 else: -1466 xv, yv = self.proj(xv, yv) -1467 self._pc = Basemap.pcolor(self.proj, xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1468 self.proj.drawcoastlines() -1469 -1470 self._xc = 0.25*(xv[1:,1:]+xv[1:,:-1]+xv[:-1,1:]+xv[:-1,:-1]) -1471 self._yc = 0.25*(yv[1:,1:]+yv[1:,:-1]+yv[:-1,1:]+yv[:-1,:-1]) -1472 -1473 plt.connect('button_press_event', self._on_click) -1474 plt.connect('key_press_event', self._on_key) -1475 self._clicking = False -1476 plt.title('Interactive %s -- click "i" to toggle' % self._clicking) -1477 plt.draw() -
1478 -1479 -1480 if __name__ == '__main__': -1481 geographic = False -1482 if geographic: -1483 from mpl_toolkits.basemap import Basemap -1484 proj = Basemap(projection='lcc', -1485 resolution='i', -1486 llcrnrlon=-72.0, -1487 llcrnrlat= 40.0, -1488 urcrnrlon=-63.0, -1489 urcrnrlat=47.0, -1490 lat_0=43.0, -1491 lon_0=-62.5) -1492 -1493 lon = (-71.977385177601761, -70.19173825913137, -1494 -63.045075098584945,-64.70104074097425) -1495 lat = (42.88215610827428, 41.056141745853786, -1496 44.456701607935841, 46.271758064353897) -1497 beta = [1.0, 1.0, 1.0, 1.0] -1498 -1499 grd = Gridgen(lon, lat, beta, (32, 32), proj=proj) -1500 -1501 for seg in proj.coastsegs: -1502 grd.mask_polygon(seg) -1503 -1504 plt.pcolor(grd.x, grd.y, grd.mask) -1505 plt.show() -1506 else: -1507 x = [0.2, 0.85, 0.9, 0.82, 0.23] -1508 y = [0.2, 0.25, 0.5, 0.82, .83] -1509 beta = [1.0, 1.0, 0.0, 1.0, 1.0] -1510 -1511 grd = Gridgen(x, y, beta, (32, 32)) -1512 -1513 ax = plt.subplot(111) -1514 BoundaryInteractor(x, y, beta) -1515 plt.show() -1516 -
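For orientation, the class whose rendered source ends just above is driven roughly as
follows. This is a hedged sketch only: pyroms.grid.get_ROMS_grid and the grid id are
assumptions made for illustration, not something this diff documents.

    # Hedged usage sketch for get_position_from_map (source shown above).
    # get_ROMS_grid and 'MY_GRID' are placeholders, not part of this commit.
    import matplotlib.pyplot as plt
    import pyroms
    from pyroms.hgrid import get_position_from_map

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')   # placeholder grid id
    get_position_from_map(grd)                   # pass proj=<Basemap> for lon/lat output
    plt.show()
    # Press 'i' to enter interactive mode, then click a cell to print its i, j
    # indices (and the matching lon/lat when a projection was supplied).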
diff --git a/pyroms/docs/pyroms.hgrid.BoundaryInteractor-class.html b/pyroms/docs/pyroms.hgrid.BoundaryInteractor-class.html
deleted file mode 100644
index cec7cc6..0000000
[674 lines of generated epydoc HTML removed: class page for pyroms.hgrid.BoundaryInteractor,
the interactive boundary editor for grid creation (keys t/d/i edit vertices, p/m/z set the
beta values, G runs gridgen, T toggles the generated grid; save_bry/load_bry/save_grid
pickle helpers; properties x, y, verts, grd).]
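The workflow that page documented survives in the __main__ demo kept above; stripped to
its Cartesian branch it is essentially the following (only the imports are assumed, taken
from the module path in the page title):

    # Minimal sketch of the Cartesian demo from the deleted listing above.
    import matplotlib.pyplot as plt
    from pyroms.hgrid import BoundaryInteractor, Gridgen

    x = [0.2, 0.85, 0.9, 0.82, 0.23]     # boundary vertices, counterclockwise
    y = [0.2, 0.25, 0.5, 0.82, 0.83]
    beta = [1.0, 1.0, 0.0, 1.0, 1.0]     # +1 = corner, 0 = straight section

    grd = Gridgen(x, y, beta, (32, 32))  # 32 x 32 curvilinear grid inside the polygon

    plt.subplot(111)
    BoundaryInteractor(x, y, beta)       # keys: t/d/i edit vertices, p/m/z set beta,
    plt.show()                           # G regenerates the grid, T toggles it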
diff --git a/pyroms/docs/pyroms.hgrid.CGrid-class.html b/pyroms/docs/pyroms.hgrid.CGrid-class.html
deleted file mode 100644
index 9024e09..0000000
[693 lines of generated epydoc HTML removed: class page for pyroms.hgrid.CGrid, the
curvilinear Arakawa C-grid built from vertex arrays, with rho/u/v/psi sub-grids, grid
metrics, calculate_orthogonality() and mask_polygon(); the same docstrings survive in the
pyroms.hgrid_old source listing at the end of this commit.]
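For reference, the docstring example from that page (it also survives in the hgrid_old
listing below) reduces to:

    # Docstring example from the deleted CGrid page, lightly modernized.
    import numpy as np
    import pyroms

    x, y = np.mgrid[0.0:7.0, 0.0:8.0]
    x = np.ma.masked_where((x < 3) & (y < 3), x)   # knock out one corner of the vertices
    y = np.ma.MaskedArray(y, x.mask)

    grd = pyroms.grid.CGrid(x, y)   # import path as written in the docstring
    print(grd.x_rho)                # cell centres, masked where any vertex is masked
    print(grd.mask)                 # rho-point mask: 0 = masked, 1 = unmasked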
diff --git a/pyroms/docs/pyroms.hgrid.CGrid_geo-class.html b/pyroms/docs/pyroms.hgrid.CGrid_geo-class.html
deleted file mode 100644
index c6aaf9a..0000000
[512 lines of generated epydoc HTML removed: class page for pyroms.hgrid.CGrid_geo, the
geographic C-grid variant: cell widths come from great-circle distances and angles from a
conformal Basemap projection ('merc' or 'lcc'); lon/lat properties are shorthands for
lon_vert/lat_vert.]
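A minimal construction sketch matching the constructor signature that page recorded; the
vertex arrays here are made-up placeholders, and the projection line is the removed
docstring's own default.

    # Illustrative only: lon_vert/lat_vert are invented placeholder vertices.
    import numpy as np
    from mpl_toolkits.basemap import Basemap
    from pyroms.hgrid import CGrid_geo

    lon_vert, lat_vert = np.meshgrid(np.linspace(-72.0, -63.0, 33),
                                     np.linspace(40.0, 47.0, 33))

    # Conformal projection, as the removed docstring requires ('merc' or 'lcc').
    proj = Basemap(projection='merc', resolution=None, lat_ts=0.0)

    grd = CGrid_geo(lon_vert, lat_vert, proj, use_gcdist=True, ellipse='WGS84')
    print(grd.lon.shape, grd.lat.shape)   # lon/lat are shorthands for lon_vert/lat_vert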
diff --git a/pyroms/docs/pyroms.hgrid.Focus-class.html b/pyroms/docs/pyroms.hgrid.Focus-class.html
deleted file mode 100644
index 1d6ccb0..0000000
[335 lines of generated epydoc HTML removed: class page for pyroms.hgrid.Focus, a container
of _Focus_x/_Focus_y transforms that locally refines a normalized [0, 1] grid in a roughly
Gaussian region (add_focus_x / add_focus_y, then call the object on x, y); usable through
Gridgen's focus argument.]
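Its docstring example, which also survives in the listing below, amounts to:

    # Docstring example from the deleted Focus page, prints modernized.
    import numpy as np
    import pyroms

    foc = pyroms.grid.Focus()
    foc.add_focus_x(0.2, factor=3.0, Rx=0.2)   # ~3x finer resolution around x = 0.2
    foc.add_focus_y(0.6, factor=5.0, Ry=0.35)  # ~5x finer resolution around y = 0.6

    x, y = np.mgrid[0:1:3j, 0:1:3j]            # uniform grid on [0, 1] x [0, 1]
    xf, yf = foc(x, y)                         # focused coordinates
    print(xf)
    print(yf)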
diff --git a/pyroms/docs/pyroms.hgrid.Gridgen-class.html b/pyroms/docs/pyroms.hgrid.Gridgen-class.html
deleted file mode 100644
index bc2614c..0000000
[323 lines of generated epydoc HTML removed: class page for pyroms.hgrid.Gridgen, the CGrid
subclass that drives the gridgen library; constructor takes xbry, ybry, beta, shape plus
ul_idx, focus, proj, nnodes, precision, nppe, newton, thin, checksimplepoly and verbose
options.]
diff --git a/pyroms/docs/pyroms.hgrid._Focus_x-class.html b/pyroms/docs/pyroms.hgrid._Focus_x-class.html
deleted file mode 100644
index 1ae8c75..0000000
[283 lines of generated epydoc HTML removed: class page for the private helper
pyroms.hgrid._Focus_x (x-direction focusing transform with parameters xo, factor and Rx).]
diff --git a/pyroms/docs/pyroms.hgrid._Focus_y-class.html b/pyroms/docs/pyroms.hgrid._Focus_y-class.html
deleted file mode 100644
index 39b1542..0000000
[282 lines of generated epydoc HTML removed: class page for the private helper
pyroms.hgrid._Focus_y (y-direction focusing transform with parameters yo, factor and Ry).]
diff --git a/pyroms/docs/pyroms.hgrid.edit_mask_mesh-class.html b/pyroms/docs/pyroms.hgrid.edit_mask_mesh-class.html
deleted file mode 100644
index 635d9d7..0000000
[285 lines of generated epydoc HTML removed: class page for pyroms.hgrid.edit_mask_mesh,
the interactive mask editor: clicking a cell toggles it between land and sea, 'e' switches
between editing and viewing, and every change is logged to mask_change.txt.]
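Typical use, as a hedged sketch (pyroms.grid.get_ROMS_grid is again an assumed entry point
for obtaining a grid object, not something this diff shows):

    # Hedged sketch of an edit_mask_mesh session.
    import matplotlib.pyplot as plt
    import pyroms
    from pyroms.hgrid import edit_mask_mesh

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')   # placeholder grid id
    edit_mask_mesh(grd)                          # add proj=<Basemap> to work in lon/lat
    plt.show()
    # 'e' toggles editing mode; each click flips a cell between land and sea
    # and appends the change to mask_change.txt for later reuse.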
diff --git a/pyroms/docs/pyroms.hgrid.edit_mask_mesh_ij-class.html b/pyroms/docs/pyroms.hgrid.edit_mask_mesh_ij-class.html
deleted file mode 100644
index b56883d..0000000
[289 lines of generated epydoc HTML removed: class page for pyroms.hgrid.edit_mask_mesh_ij,
the same interactive mask editor working in i/j index space (optional coast overlay, 'e'
toggles editing, changes logged to mask_change.txt).]
diff --git a/pyroms/docs/pyroms.hgrid.get_position_from_map-class.html b/pyroms/docs/pyroms.hgrid.get_position_from_map-class.html
deleted file mode 100644
index fc0ff95..0000000
[285 lines of generated epydoc HTML removed: class page for
pyroms.hgrid.get_position_from_map; clicking a cell prints its i, j indices and, when a
projection is given, the corresponding lon/lat ('i' toggles interactive mode). The class
source itself is visible in the deleted listing earlier in this commit.]
diff --git a/pyroms/docs/pyroms.hgrid_old-module.html b/pyroms/docs/pyroms.hgrid_old-module.html
deleted file mode 100644
index fcd6a15..0000000
[367 lines of generated epydoc HTML removed: module page for pyroms.hgrid_old ("Tools for
creating and working with Arakawa C-Grids"), listing the classes above plus the functions
_approximate_erf, rho_to_vert, rho_to_vert_geo and uvp_masks (rho-point mask in, u-, v-
and psi-point masks out).]
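Of those helpers, uvp_masks is the one most likely to be missed; its contract is sketched
below, importing from pyroms.hgrid on the assumption that the current module keeps the
same helper the deleted hgrid_old page documents. The commented products are the ones the
CGrid mask properties use in the listing that follows.

    # Sketch of the uvp_masks contract (import path assumed, see note above).
    import numpy as np
    from pyroms.hgrid import uvp_masks

    rmask = np.ones((4, 5))    # 1 = water, 0 = land at rho points
    rmask[0, :2] = 0.0         # a couple of land cells

    umask, vmask, pmask = uvp_masks(rmask)
    # Equivalent staggered products:
    #   umask = rmask[:, 1:] * rmask[:, :-1]
    #   vmask = rmask[1:, :] * rmask[:-1, :]
    #   pmask = rmask[1:, 1:] * rmask[:-1, 1:] * rmask[1:, :-1] * rmask[:-1, :-1]
    print(umask.shape, vmask.shape, pmask.shape)   # (4, 4) (3, 5) (3, 4)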
diff --git a/pyroms/docs/pyroms.hgrid_old-pysrc.html b/pyroms/docs/pyroms.hgrid_old-pysrc.html
deleted file mode 100644
index 389b0e7..0000000
--- a/pyroms/docs/pyroms.hgrid_old-pysrc.html
+++ /dev/null
@@ -1,2470 +0,0 @@
-[epydoc page chrome trimmed; the rendered "Source Code for Module pyroms.hgrid_old" listing follows]
-   1  # encoding: utf-8 
-   2  '''Tools for creating and working with Arikawa C-Grids''' 
-   3  __docformat__ = "restructuredtext en" 
-   4   
-   5  import os 
-   6  import sys 
-   7  import ctypes 
-   8  import cPickle 
-   9  from warnings import warn 
-  10  from copy import deepcopy 
-  11   
-  12  import numpy as np 
-  13  import matplotlib.pyplot as plt 
-  14  from matplotlib.artist import Artist 
-  15  from matplotlib.patches import Polygon, CirclePolygon 
-  16  from matplotlib.lines import Line2D 
-  17  #from matplotlib.numerix.mlab import amin 
-  18  from matplotlib.mlab import dist_point_to_segment 
-  19  from matplotlib.nxutils import points_inside_poly 
-  20   
-  21  from mpl_toolkits.basemap import Basemap 
-  22  from mpl_toolkits.basemap import pyproj 
-  23   
-  24  try: 
-  25      import scipy.spatial.cKDTree as KDTree 
-  26  except: 
-  27      #  no scipy 
-  28      from pyroms.extern import KDTree 
-  29   
-  30  import pyroms 
-  31  from pyroms.vgrid import * 
-  32  from pyroms.extern import GreatCircle 
-  33   
-
34 -class BoundaryInteractor(object): -
35 """ - 36 Interactive grid creation - 37 - 38 bry = BoundaryClick(x=[], y=[], beta=None, ax=gca(), **gridgen_options) - 39 - 40 The initial boundary polygon points (x and y) are - 41 counterclockwise, starting in the upper left corner of the - 42 boundary. - 43 - 44 Key commands: - 45 - 46 t : toggle visibility of verticies - 47 d : delete a vertex - 48 i : insert a vertex at a point on the polygon line - 49 - 50 p : set vertex as beta=1 (a Positive turn, marked with green triangle) - 51 m : set vertex as beta=1 (a Negative turn, marked with red triangle) - 52 z : set vertex as beta=0 (no corner, marked with a black dot) - 53 - 54 G : generate grid from the current boundary using gridgen - 55 T : toggle visability of the current grid - 56 - 57 Methods: - 58 - 59 bry.dump(bry_file) - 60 Write the current boundary informtion (bry.x, bry.y, bry.beta) to - 61 a cPickle file bry_file. - 62 - 63 bry.load(bry_file) - 64 Read in boundary informtion (x, y, beta) from the cPickle file - 65 bry_file. - 66 - 67 bry.remove_grid() - 68 Remove gridlines from axes. - 69 - 70 Attributes: - 71 bry.x : the X boundary points - 72 bry.y : the Y boundary points - 73 bry.verts : the verticies of the grid - 74 bry.grd : the CGrid object - 75 - 76 """ - 77 - 78 _showverts = True - 79 _showbetas = True - 80 _showgrid = True - 81 _epsilon = 5 # max pixel distance to count as a vertex hit - 82 -
83 - def _update_beta_lines(self): -
84 """Update m/pline by finding the points where self.beta== -/+ 1""" - 85 x, y = zip(*self._poly.xy) - 86 num_points = len(x)-1 # the first and last point are repeated - 87 - 88 xp = [x[n] for n in range(num_points) if self.beta[n]==1] - 89 yp = [y[n] for n in range(num_points) if self.beta[n]==1] - 90 self._pline.set_data(xp, yp) - 91 - 92 xm = [x[n] for n in range(num_points) if self.beta[n]==-1] - 93 ym = [y[n] for n in range(num_points) if self.beta[n]==-1] - 94 self._mline.set_data(xm, ym) - 95 - 96 xz = [x[n] for n in range(num_points) if self.beta[n]==0] - 97 yz = [y[n] for n in range(num_points) if self.beta[n]==0] - 98 self._zline.set_data(xz, yz) - 99 - 100 if len(x)-1 < self.gridgen_options['ul_idx']: - 101 self.gridgen_options['ul_idx'] = len(x)-1 - 102 xs = x[self.gridgen_options['ul_idx']] - 103 ys = y[self.gridgen_options['ul_idx']] - 104 self._sline.set_data(xs, ys) -
105 -
106 - def remove_grid(self): -
107 """Remove a generated grid from the BoundaryClick figure""" - 108 if hasattr(self, '_gridlines'): - 109 for line in self._gridlines: - 110 self._ax.lines.remove(line) - 111 delattr(self, '_gridlines') -
112 -
113 - def _draw_callback(self, event): -
114 self._background = self._canvas.copy_from_bbox(self._ax.bbox) - 115 self._ax.draw_artist(self._poly) - 116 self._ax.draw_artist(self._pline) - 117 self._ax.draw_artist(self._mline) - 118 self._ax.draw_artist(self._zline) - 119 self._ax.draw_artist(self._sline) - 120 self._ax.draw_artist(self._line) - 121 self._canvas.blit(self._ax.bbox) -
122 -
123 - def _poly_changed(self, poly): -
124 'this method is called whenever the polygon object is called' - 125 # only copy the artist props to the line (except visibility) - 126 vis = self._line.get_visible() - 127 Artist.update_from(self._line, poly) - 128 self._line.set_visible(vis) # don't use the poly visibility state -
129 -
130 - def _get_ind_under_point(self, event): -
131 'get the index of the vertex under point if within epsilon tolerance' - 132 try: - 133 x, y = zip(*self._poly.xy) - 134 - 135 # display coords - 136 xt, yt = self._poly.get_transform().numerix_x_y(x, y) - 137 d = np.sqrt((xt-event.x)**2 + (yt-event.y)**2) - 138 indseq = np.nonzero(np.equal(d, np.amin(d))) - 139 ind = indseq[0] - 140 - 141 if d[ind]>=self._epsilon: - 142 ind = None - 143 - 144 return ind - 145 except: - 146 # display coords - 147 xy = np.asarray(self._poly.xy) - 148 xyt = self._poly.get_transform().transform(xy) - 149 xt, yt = xyt[:, 0], xyt[:, 1] - 150 d = np.sqrt((xt-event.x)**2 + (yt-event.y)**2) - 151 indseq = np.nonzero(np.equal(d, np.amin(d)))[0] - 152 ind = indseq[0] - 153 - 154 if d[ind]>=self._epsilon: - 155 ind = None - 156 - 157 return ind -
158 -
159 - def _button_press_callback(self, event): -
160 'whenever a mouse button is pressed' - 161 # if not self._showverts: return - 162 if event.inaxes==None: return - 163 if event.button != 1: return - 164 self._ind = self._get_ind_under_point(event) -
165 -
166 - def _button_release_callback(self, event): -
167 'whenever a mouse button is released' - 168 # if not self._showverts: return - 169 if event.button != 1: return - 170 self._ind = None -
171 -
172 - def _key_press_callback(self, event): -
173 'whenever a key is pressed' - 174 if not event.inaxes: return - 175 if event.key=='shift': return - 176 - 177 if event.key=='t': - 178 self._showbetas = not self._showbetas - 179 self._line.set_visible(self._showbetas) - 180 self._pline.set_visible(self._showbetas) - 181 self._mline.set_visible(self._showbetas) - 182 self._zline.set_visible(self._showbetas) - 183 self._sline.set_visible(self._showbetas) - 184 elif event.key=='d': - 185 ind = self._get_ind_under_point(event) - 186 if ind is not None: - 187 self._poly.xy = [tup for i,tup in enumerate(self._poly.xy) \ - 188 if i!=ind] - 189 self._line.set_data(zip(*self._poly.xy)) - 190 self.beta = [beta for i,beta in enumerate(self.beta) \ - 191 if i!=ind] - 192 elif event.key=='p': - 193 ind = self._get_ind_under_point(event) - 194 if ind is not None: - 195 self.beta[ind] = 1.0 - 196 elif event.key=='m': - 197 ind = self._get_ind_under_point(event) - 198 if ind is not None: - 199 self.beta[ind] = -1.0 - 200 elif event.key=='z': - 201 ind = self._get_ind_under_point(event) - 202 if ind is not None: - 203 self.beta[ind] = 0.0 - 204 elif event.key=='s': - 205 ind = self._get_ind_under_point(event) - 206 if ind is not None: - 207 self.gridgen_options['ul_idx'] = ind - 208 elif event.key=='i': - 209 xys = self._poly.get_transform().transform(self._poly.xy) - 210 p = event.x, event.y # display coords - 211 for i in range(len(xys)-1): - 212 s0 = xys[i] - 213 s1 = xys[i+1] - 214 d = dist_point_to_segment(p, s0, s1) - 215 if d<=self._epsilon: - 216 self._poly.xy = np.array( - 217 list(self._poly.xy[:i+1]) + - 218 [(event.xdata, event.ydata)] + - 219 list(self._poly.xy[i+1:])) - 220 self._line.set_data(zip(*self._poly.xy)) - 221 self.beta.insert(i+1, 0) - 222 break - 223 s0 = xys[-1] - 224 s1 = xys[0] - 225 d = dist_point_to_segment(p, s0, s1) - 226 if d<=self._epsilon: - 227 self._poly.xy = np.array( - 228 list(self._poly.xy) + - 229 [(event.xdata, event.ydata)]) - 230 self._line.set_data(zip(*self._poly.xy)) - 231 self.beta.append(0) - 232 elif event.key=='G' or event.key == '1': - 233 options = deepcopy(self.gridgen_options) - 234 shp = options.pop('shp') - 235 if self.proj is None: - 236 x = self.x - 237 y = self.y - 238 self.grd = Gridgen(x, y, self.beta, shp, - 239 proj=self.proj, **options) - 240 else: - 241 lon, lat = self.proj(self.x, self.y, inverse=True) - 242 self.grd = Gridgen(lon, lat, self.beta, shp, - 243 proj=self.proj, **options) - 244 self.remove_grid() - 245 self._showgrid = True - 246 gridlineprops = {'linestyle':'-', 'color':'k', 'lw':0.2} - 247 self._gridlines = [] - 248 for line in self._ax._get_lines(*(self.grd.x, self.grd.y), - 249 **gridlineprops): - 250 self._ax.add_line(line) - 251 self._gridlines.append(line) - 252 for line in self._ax._get_lines(*(self.grd.x.T, self.grd.y.T), - 253 **gridlineprops): - 254 self._ax.add_line(line) - 255 self._gridlines.append(line) - 256 elif event.key=='T' or event.key == '2': - 257 self._showgrid = not self._showgrid - 258 if hasattr(self, '_gridlines'): - 259 for line in self._gridlines: - 260 line.set_visible(self._showgrid) - 261 - 262 self._update_beta_lines() - 263 self._draw_callback(event) - 264 self._canvas.draw() -
265 -
266 - def _motion_notify_callback(self, event): -
267 'on mouse movement' - 268 # if not self._showverts: return - 269 if self._ind is None: return - 270 if event.inaxes is None: return - 271 if event.button != 1: return - 272 x,y = event.xdata, event.ydata - 273 self._poly.xy[self._ind] = x, y - 274 if self._ind == 0: - 275 self._poly.xy[-1] = x, y - 276 - 277 x, y = zip(*self._poly.xy) - 278 self._line.set_data(x[:-1], y[:-1]) - 279 self._update_beta_lines() - 280 - 281 self._canvas.restore_region(self._background) - 282 self._ax.draw_artist(self._poly) - 283 self._ax.draw_artist(self._pline) - 284 self._ax.draw_artist(self._mline) - 285 self._ax.draw_artist(self._zline) - 286 self._ax.draw_artist(self._sline) - 287 self._ax.draw_artist(self._line) - 288 self._canvas.blit(self._ax.bbox) -
289 - 290 -
291 - def __init__(self, x, y=None, beta=None, ax=None, proj=None, - 292 **gridgen_options): -
293 - 294 if isinstance(x, str): - 295 bry_dict = np.load(x) - 296 x = bry_dict['x'] - 297 y = bry_dict['y'] - 298 beta = bry_dict['beta'] - 299 - 300 assert len(x) >= 4, 'Boundary must have at least four points.' - 301 - 302 if ax is None: - 303 ax = plt.gca() - 304 - 305 self._ax = ax - 306 - 307 self.proj = proj - 308 - 309 # Set default gridgen option, and copy over specified options. - 310 self.gridgen_options = {'ul_idx': 0, 'shp': (32, 32)} - 311 - 312 for key, value in gridgen_options.iteritems(): - 313 self.gridgen_options[key] = gridgen_options[key] - 314 - 315 x = list(x); y = list(y) - 316 assert len(x)==len(y), 'arrays must be equal length' - 317 - 318 if beta is None: - 319 self.beta = [0 for xi in x] - 320 else: - 321 assert len(x)==len(beta), 'beta must have same length as x and y' - 322 self.beta = list(beta) - 323 - 324 self._line = Line2D(x, y, animated=True, - 325 ls='-', color='k', alpha=0.5, lw=1) - 326 self._ax.add_line(self._line) - 327 - 328 self._canvas = self._line.figure.canvas - 329 - 330 self._poly = Polygon(self.verts, alpha=0.1, fc='k', animated=True) - 331 self._ax.add_patch(self._poly) - 332 - 333 # Link in the lines that will show the beta values - 334 # pline for positive turns, mline for negative (minus) turns - 335 # otherwize zline (zero) for straight sections - 336 self._pline = Line2D([], [], marker='^', ms=12, mfc='g',\ - 337 animated=True, lw=0) - 338 self._mline = Line2D([], [], marker='v', ms=12, mfc='r',\ - 339 animated=True, lw=0) - 340 self._zline = Line2D([], [], marker='o', mfc='k', animated=True, lw=0) - 341 self._sline = Line2D([], [], marker='s', mfc='k', animated=True, lw=0) - 342 - 343 self._update_beta_lines() - 344 self._ax.add_artist(self._pline) - 345 self._ax.add_artist(self._mline) - 346 self._ax.add_artist(self._zline) - 347 self._ax.add_artist(self._sline) - 348 - 349 # get the canvas and connect the callback events - 350 cid = self._poly.add_callback(self._poly_changed) - 351 self._ind = None # the active vert - 352 - 353 self._canvas.mpl_connect('draw_event', self._draw_callback) - 354 self._canvas.mpl_connect('button_press_event',\ - 355 self._button_press_callback) - 356 self._canvas.mpl_connect('key_press_event', self._key_press_callback) - 357 self._canvas.mpl_connect('button_release_event',\ - 358 self._button_release_callback) - 359 self._canvas.mpl_connect('motion_notify_event',\ - 360 self._motion_notify_callback) -
361 -
362 - def save_bry(self, bry_file='bry.pickle'): -
363 f = open(bry_file, 'wb') - 364 bry_dict = {'x': self.x, 'y': self.y, 'beta': self.beta} - 365 cPickle.dump(bry_dict, f, protocol=-1) - 366 f.close() -
367 -
368 - def load_bry(self, bry_file='bry.pickle'): -
369 bry_dict = np.load(bry_file) - 370 x = bry_dict['x'] - 371 y = bry_dict['y'] - 372 self._line.set_data(x, y) - 373 self.beta = bry_dict['beta'] - 374 if hasattr(self, '_poly'): - 375 self._poly.xy = zip(x, y) - 376 self._update_beta_lines() - 377 self._draw_callback(None) - 378 self._canvas.draw() -
379 -
380 - def save_grid(self, grid_file='grid.pickle'): -
381 f = open(grid_file, 'wb') - 382 cPickle.dump(self.grd, f, protocol=-1) - 383 f.close() -
384 -
385 - def _get_verts(self): return zip(self.x, self.y) -
386 verts = property(_get_verts) -
387 - def get_xdata(self): return self._line.get_xdata() -
388 x = property(get_xdata) -
389 - def get_ydata(self): return self._line.get_ydata() -
390 y = property(get_ydata) -
391 - 392 - 393 -
394 -def _approximate_erf(x): -
395 ''' - 396 Return approximate solution to error function - 397 see http://en.wikipedia.org/wiki/Error_function - 398 ''' - 399 a = -(8*(np.pi-3.0)/(3.0*np.pi*(np.pi-4.0))) - 400 return np.sign(x) * \ - 401 np.sqrt(1.0 - np.exp( -x**2*(4.0/np.pi+a*x*x)/(1.0+a*x*x) )) -
402 - 403 -
404 -class _Focus_x(object): -
405 """ - 406 Return a transformed, uniform grid, focused in the x-direction - 407 - 408 This class may be called with a uniform grid, with limits from [0, 1], to - 409 create a focused grid in the x-directions centered about xo. The output - 410 grid is also uniform from [0, 1] in both x and y. - 411 - 412 Parameters - 413 ---------- - 414 xo : float - 415 Location about which to focus grid - 416 factor : float - 417 amount to focus grid. Creates cell sizes that are factor smaller in - 418 the focused - 419 region. - 420 Rx : float - 421 Lateral extent of focused region, similar to a lateral spatial scale - 422 for the focusing region. - 423 - 424 Returns - 425 ------- - 426 foc : class - 427 The class may be called with arguments of a grid. The returned - 428 transformed grid (x, y) will be focused as per the parameters above. - 429 """ - 430 -
431 - def __init__(self, xo, factor=2.0, Rx=0.1): -
432 self.xo = xo - 433 self.factor = factor - 434 self.Rx = Rx -
435 -
436 - def __call__(self, x, y): -
437 x = np.asarray(x) - 438 y = np.asarray(y) - 439 assert not np.any(x>1.0) or not np.any(x<0.0) \ - 440 or not np.any(y>1.0) or not np.any(x<0.0), \ - 441 'x and y must both be within the range [0, 1].' - 442 - 443 alpha = 1.0 - 1.0/self.factor - 444 def xf(x): - 445 return x - 0.5*( np.sqrt(np.pi)*self.Rx*alpha - 446 *_approximate_erf((x-self.xo)/self.Rx) ) -
447 - 448 xf0 = xf(0.0); xf1 = xf(1.0) - 449 - 450 return (xf(x)-xf0)/(xf1-xf0), y -
451 -
452 -class _Focus_y(object): -
453 """ - 454 Return a transformed, uniform grid, focused in the y-direction - 455 - 456 This class may be called with a uniform grid, with limits from [0, 1], - 457 to create a focused grid in the y-directions centered about yo. - 458 The output grid is also uniform from [0, 1] in both x and y. - 459 - 460 Parameters - 461 ---------- - 462 yo : float - 463 Location about which to focus grid - 464 factor : float - 465 amount to focus grid. Creates cell sizes that are factor - 466 smaller in the focused region. - 467 Ry : float - 468 Lateral extent of focused region, similar to a lateral - 469 spatial scale for the focusing region. - 470 - 471 Returns - 472 ------- - 473 foc : class - 474 The class may be called with arguments of a grid. The returned - 475 transformed grid (x, y) will be focused as per the parameters above. - 476 """ - 477 -
478 - def __init__(self, yo, factor=2.0, Ry=0.1): -
479 self.yo = yo - 480 self.factor = factor - 481 self.Ry = Ry -
482 -
483 - def __call__(self, x, y): -
484 x = np.asarray(x) - 485 y = np.asarray(y) - 486 assert not np.any(x>1.0) or not np.any(x<0.0) \ - 487 or not np.any(y>1.0) or not np.any(x<0.0), \ - 488 'x and y must both be within the range [0, 1].' - 489 - 490 alpha = 1.0 - 1.0/self.factor - 491 - 492 def yf(y): - 493 return y - 0.5*( np.sqrt(np.pi)*self.Ry*alpha - 494 *_approximate_erf((y-self.yo)/self.Ry) ) -
495 - 496 yf0 = yf(0.0); yf1 = yf(1.0) - 497 - 498 return x, (yf(y)-yf0)/(yf1-yf0) -
499 -
500 -class Focus(object): -
501 """ - 502 Return a container for a sequence of Focus objects - 503 - 504 foc = Focus() - 505 - 506 The sequence is populated by using the 'add_focus_x' and 'add_focus_y' - 507 methods. These methods define a point ('xo' or 'yo'), around witch to - 508 focus, a focusing factor of 'focus', and x and y extent of focusing given - 509 by Rx or Ry. The region of focusing will be approximately Gausian, and the - 510 resolution will be increased by approximately the value of factor. - 511 - 512 Methods - 513 ------- - 514 foc.add_focus_x(xo, factor=2.0, Rx=0.1) - 515 foc.add_focus_y(yo, factor=2.0, Ry=0.1) - 516 - 517 Calls to the object return transformed coordinates: - 518 xf, yf = foc(x, y) - 519 where x and y must be within [0, 1], and are typically a uniform, - 520 normalized grid. The focused grid will be the result of applying each of - 521 the focus elements in the sequence they are added to the series. - 522 - 523 - 524 EXAMPLES - 525 -------- - 526 - 527 >>> foc = pyroms.grid.Focus() - 528 >>> foc.add_focus_x(0.2, factor=3.0, Rx=0.2) - 529 >>> foc.add_focus_y(0.6, factor=5.0, Ry=0.35) - 530 - 531 >>> x, y = np.mgrid[0:1:3j,0:1:3j] - 532 >>> xf, yf = foc(x, y) - 533 - 534 >>> print xf - 535 [[ 0. 0. 0. ] - 536 [ 0.36594617 0.36594617 0.36594617] - 537 [ 1. 1. 1. ]] - 538 >>> print yf - 539 [[ 0. 0.62479833 1. ] - 540 [ 0. 0.62479833 1. ] - 541 [ 0. 0.62479833 1. ]] - 542 """ -
543 - def __init__(self): -
544 self._focuspoints = [] -
545 -
546 - def add_focus_x(self, xo, factor=2.0, Rx=0.1): -
547 """docstring for add_point""" - 548 self._focuspoints.append(_Focus_x(xo, factor, Rx)) -
549 -
550 - def add_focus_y(self, yo, factor=2.0, Ry=0.1): -
551 """docstring for add_point""" - 552 self._focuspoints.append(_Focus_y(yo, factor, Ry)) -
553 -
554 - def __call__(self, x, y): -
555 """docstring for __call__""" - 556 for focuspoint in self._focuspoints: - 557 x, y = focuspoint(x, y) - 558 return x, y -
559 - 560 - 561 -
562 -class CGrid(object): -
563 """ - 564 Curvilinear Arakawa C-Grid - 565 - 566 The basis for the CGrid class are two arrays defining the verticies of the - 567 grid in Cartesian (for geographic coordinates, see CGrid_geo). An optional - 568 mask may be defined on the cell centers. Other Arakawa C-grid properties, - 569 such as the locations of the cell centers (rho-points), cell edges (u and - 570 v velocity points), cell widths (dx and dy) and other metrics (angle, - 571 dmde, and dndx) are all calculated internally from the vertex points. - 572 - 573 Input vertex arrays may be either type np.array or np.ma.MaskedArray. If - 574 masked arrays are used, the mask will be a combination of the specified - 575 mask (if given) and the masked locations. - 576 - 577 EXAMPLES: - 578 -------- - 579 - 580 >>> x, y = mgrid[0.0:7.0, 0.0:8.0] - 581 >>> x = np.ma.masked_where( (x<3) & (y<3), x) - 582 >>> y = np.ma.MaskedArray(y, x.mask) - 583 >>> grd = pyroms.grid.CGrid(x, y) - 584 >>> print grd.x_rho - 585 [[-- -- -- 0.5 0.5 0.5 0.5] - 586 [-- -- -- 1.5 1.5 1.5 1.5] - 587 [-- -- -- 2.5 2.5 2.5 2.5] - 588 [3.5 3.5 3.5 3.5 3.5 3.5 3.5] - 589 [4.5 4.5 4.5 4.5 4.5 4.5 4.5] - 590 [5.5 5.5 5.5 5.5 5.5 5.5 5.5]] - 591 >>> print grd.mask - 592 [[ 0. 0. 0. 1. 1. 1. 1.] - 593 [ 0. 0. 0. 1. 1. 1. 1.] - 594 [ 0. 0. 0. 1. 1. 1. 1.] - 595 [ 1. 1. 1. 1. 1. 1. 1.] - 596 [ 1. 1. 1. 1. 1. 1. 1.] - 597 [ 1. 1. 1. 1. 1. 1. 1.]] - 598 """ - 599 -
600 - def __init__(self, x_vert, y_vert, x_rho=None, y_rho=None, x_u=None, y_u=None, x_v=None, y_v=None, \ - 601 x_psi=None, y_psi=None, dx=None, dy=None, dndx=None, dmde=None, angle_rho=None): -
602 - 603 assert np.ndim(x_vert)==2 and np.ndim(y_vert)==2 and np.shape(x_vert)==np.shape(y_vert), \ - 604 'x and y must be 2D arrays of the same size.' - 605 - 606 if np.any(np.isnan(x_vert)) or np.any(np.isnan(y_vert)): - 607 x_vert = np.ma.masked_where( (isnan(x_vert)) | (isnan(y_vert)) , x_vert) - 608 y_vert = np.ma.masked_where( (isnan(x_vert)) | (isnan(y_vert)) , y_vert) - 609 - 610 self.x_vert = x_vert - 611 self.y_vert = y_vert - 612 - 613 self.f = None - 614 self.spherical = 'F' - 615 - 616 mask_shape = tuple([n-1 for n in self.x_vert.shape]) - 617 self.mask_rho = np.ones(mask_shape, dtype='d') - 618 - 619 # If maskedarray is given for verticies, modify the mask such that - 620 # non-existant grid points are masked. A cell requires all four - 621 # verticies to be defined as a water point. - 622 if isinstance(self.x_vert, np.ma.MaskedArray): - 623 mask = (self.x_vert.mask[:-1,:-1] | self.x_vert.mask[1:,:-1] | \ - 624 self.x_vert.mask[:-1,1:] | self.x_vert.mask[1:,1:]) - 625 self.mask_rho = np.asarray(~(~np.bool_(self.mask_rho) | mask), dtype='d') - 626 - 627 if isinstance(self.y_vert, np.ma.MaskedArray): - 628 mask = (self.y_vert.mask[:-1,:-1] | self.y_vert.mask[1:,:-1] | \ - 629 self.y_vert.mask[:-1,1:] | self.y_vert.mask[1:,1:]) - 630 self.mask_rho = np.asarray(~(~np.bool_(self.mask_rho) | mask), dtype='d') - 631 - 632 if x_rho is None or y_rho is None or x_u is None or y_u is None or \ - 633 x_v is None or y_v is None or x_psi is None or y_psi is None: - 634 self._calculate_subgrids() - 635 else: - 636 self.x_rho = x_rho - 637 self.y_rho = y_rho - 638 self.x_u = x_u - 639 self.y_u = y_u - 640 self.x_v = x_v - 641 self.y_v = y_v - 642 self.x_psi = x_psi - 643 self.y_psi = y_psi - 644 - 645 if dx is None or dy is None: - 646 self._calculate_metrics() - 647 else: - 648 self.dx = dx - 649 self.dy = dy - 650 - 651 self.xl = np.maximum(self.dx[0,:].sum(), self.dx[-1,:].sum()) - 652 self.el = np.maximum(self.dy[:,0].sum(), self.dy[:,-1].sum()) - 653 - 654 if dndx is None or dmde is None: - 655 self._calculate_derivative_metrics() - 656 else: - 657 self.dndx = dndx - 658 self.dmde = dmde - 659 - 660 if angle_rho is None: - 661 self._calculate_angle_rho() - 662 else: - 663 self.angle_rho = angle_rho - 664 - 665 self._calculate_angle() -
666 - 667 -
668 - def _calculate_subgrids(self): -
669 self.x_rho = 0.25*(self.x_vert[1:,1:]+self.x_vert[1:,:-1]+ \ - 670 self.x_vert[:-1,1:]+self.x_vert[:-1,:-1]) - 671 self.y_rho = 0.25*(self.y_vert[1:,1:]+self.y_vert[1:,:-1]+ \ - 672 self.y_vert[:-1,1:]+self.y_vert[:-1,:-1]) - 673 self.x_u = 0.5*(self.x_vert[:-1,1:-1] + self.x_vert[1:,1:-1]) - 674 self.y_u = 0.5*(self.y_vert[:-1,1:-1] + self.y_vert[1:,1:-1]) - 675 self.x_v = 0.5*(self.x_vert[1:-1,:-1] + self.x_vert[1:-1,1:]) - 676 self.y_v = 0.5*(self.y_vert[1:-1,:-1] + self.y_vert[1:-1,1:]) - 677 self.x_psi = self.x_vert[1:-1,1:-1] - 678 self.y_psi = self.y_vert[1:-1,1:-1] -
679 -
680 - def _calculate_metrics(self): -
681 'Calculates pm, pn, dndx, dmde from x_vert and y_vert' - 682 x_temp = 0.5*(self.x_vert[1:,:]+self.x_vert[:-1,:]) - 683 y_temp = 0.5*(self.y_vert[1:,:]+self.y_vert[:-1,:]) - 684 self.dx = np.sqrt(np.diff(x_temp, axis=1)**2 + np.diff(y_temp, axis=1)**2) - 685 x_temp = 0.5*(self.x_vert[:,1:]+self.x_vert[:,:-1]) - 686 y_temp = 0.5*(self.y_vert[:,1:]+self.y_vert[:,:-1]) - 687 self.dy = np.sqrt(np.diff(x_temp, axis=0)**2 + np.diff(y_temp, axis=0)**2) -
688 - 690 if isinstance(self.dy, np.ma.MaskedArray): - 691 self.dndx = np.ma.zeros(self.x_rho.shape, dtype='d') - 692 else: - 693 self.dndx = np.zeros(self.x_rho.shape, dtype='d') - 694 - 695 if isinstance(self.dx, np.ma.MaskedArray): - 696 self.dmde = np.ma.zeros(self.x_rho.shape, dtype='d') - 697 else: - 698 self.dmde = np.zeros(self.x_rho.shape, dtype='d') - 699 - 700 self.dndx[1:-1,1:-1] = 0.5*(self.dy[1:-1,2:] - self.dy[1:-1,:-2]) - 701 self.dmde[1:-1,1:-1] = 0.5*(self.dx[2:,1:-1] - self.dx[:-2,1:-1]) -
702 -
703 - def _calculate_angle(self): -
704 if isinstance(self.x_vert, np.ma.MaskedArray) or \ - 705 isinstance(self.y_vert, np.ma.MaskedArray): - 706 self.angle = np.ma.zeros(self.x_vert.shape, dtype='d') - 707 else: - 708 self.angle = np.zeros(self.x_vert.shape, dtype='d') - 709 - 710 angle_ud = np.arctan2(np.diff(self.y_vert, axis=1), np.diff(self.x_vert, axis=1)) - 711 angle_lr = np.arctan2(np.diff(self.y_vert, axis=0), np.diff(self.x_vert, axis=0)) - np.pi/2.0 - 712 # domain center - 713 self.angle[1:-1,1:-1] = 0.25*(angle_ud[1:-1,1:]+angle_ud[1:-1,:-1]\ - 714 +angle_lr[1:,1:-1]+angle_lr[:-1,1:-1]) - 715 # edges - 716 self.angle[0,1:-1] = (1.0/3.0)*(angle_lr[0,1:-1]+angle_ud[0,1:]+angle_ud[0,:-1]) - 717 self.angle[-1,1:-1] = (1.0/3.0)*(angle_lr[-1,1:-1]+angle_ud[-1,1:]+angle_ud[-1,:-1]) - 718 self.angle[1:-1,0] = (1.0/3.0)*(angle_ud[1:-1,0]+angle_lr[1:,0]+angle_lr[:-1,0]) - 719 self.angle[1:-1,-1] = (1.0/3.0)*(angle_ud[1:-1,-1]+angle_lr[1:,-1]+angle_lr[:-1,-1]) - 720 #conrers - 721 self.angle[0,0] = 0.5*(angle_lr[0,0]+angle_ud[0,0]) - 722 self.angle[0,-1] = 0.5*(angle_lr[0,-1]+angle_ud[0,-1]) - 723 self.angle[-1,0] = 0.5*(angle_lr[-1,0]+angle_ud[-1,0]) - 724 self.angle[-1,-1] = 0.5*(angle_lr[-1,-1]+angle_ud[-1,-1]) - 725 -
726 - def _calculate_angle_rho(self): -
727 self.angle_rho = np.arctan2(np.diff(0.5*(self.y_vert[1:,:]+self.y_vert[:-1,:])), \ - 728 np.diff(0.5*(self.x_vert[1:,:]+self.x_vert[:-1,:]))) - 729 -
730 - def calculate_orthogonality(self): -
731 ''' - 732 Calculate orthogonality error in radians - 733 ''' - 734 z = self.x_vert + 1j*self.y_vert - 735 du = np.diff(z, axis=1); du = (du/abs(du))[:-1,:] - 736 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,:-1] - 737 ang1 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 738 du = np.diff(z, axis=1); du = (du/abs(du))[1:,:] - 739 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,:-1] - 740 ang2 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 741 du = np.diff(z, axis=1); du = (du/abs(du))[:-1,:] - 742 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,1:] - 743 ang3 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 744 du = np.diff(z, axis=1); du = (du/abs(du))[1:,:] - 745 dv = np.diff(z, axis=0); dv = (dv/abs(dv))[:,1:] - 746 ang4 = np.arccos(du.real*dv.real + du.imag*dv.imag) - 747 ang = np.mean([abs(ang1), abs(ang2), abs(ang3), abs(ang4)], axis=0) - 748 ang = (ang-np.pi/2.0) - 749 return ang -
750 -
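A minimal sketch of exercising calculate_orthogonality (the rectangular test grid below is an assumption, not part of pyroms); on a perfectly rectangular mesh the returned error should be essentially zero everywhere:

    import numpy as np
    x_vert, y_vert = np.meshgrid(np.arange(6.0), np.arange(5.0))   # hypothetical 5x6 vertex mesh
    grd = CGrid(x_vert, y_vert)
    err = grd.calculate_orthogonality()   # per-cell deviation from 90 degrees, in radians
    print(np.abs(err).max())              # expect a value very close to 0.0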
751 - def mask_polygon(self, polyverts, mask_value=0.0): -
752 """ - 753 Mask Cartesian points contained within the polygon defined by polyverts - 754 - 755 A cell is masked if the cell center (x_rho, y_rho) is within the - 756 polygon. Other sub-masks (mask_u, mask_v, and mask_psi) are updated - 757 automatically. - 758 - 759 mask_value [=0.0] may be specified to alter the value of the mask set - 760 within the polygon. E.g., mask_value=1 for water points. - 761 """ - 762 - 763 polyverts = np.asarray(polyverts) - 764 assert polyverts.ndim == 2, \ - 765 'polyverts must be a 2D array, or a similar sequence' - 766 assert polyverts.shape[1] == 2, \ - 767 'polyverts must be two columns of points' - 768 assert polyverts.shape[0] > 2, \ - 769 'polyverts must contain at least 3 points' - 770 - 771 mask = self.mask_rho - 772 inside = points_inside_poly( - 773 np.vstack( (self.x_rho.flatten(), self.y_rho.flatten()) ).T, - 774 polyverts) - 775 if np.any(inside): - 776 self.mask_rho.flat[inside] = mask_value -
777 -
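As a rough illustration of mask_polygon (the coordinates are made up, and it assumes the points_inside_poly helper used above, which came from the old matplotlib.nxutils module, is importable):

    import numpy as np
    x_vert, y_vert = np.meshgrid(np.arange(8.0), np.arange(7.0))
    grd = CGrid(x_vert, y_vert)
    # turn the lower-left corner of the domain into land (mask value 0)
    grd.mask_polygon([(0.0, 0.0), (4.0, 0.0), (0.0, 4.0)])
    # mask_u, mask_v and mask_psi are derived from mask_rho automatically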
778 - def _get_mask_u(self): -
779 return self.mask_rho[:,1:]*self.mask_rho[:,:-1] -
780 -
781 - def _get_mask_v(self): -
782 return self.mask_rho[1:,:]*self.mask_rho[:-1,:] -
783 -
784 - def _get_mask_psi(self): -
785 return self.mask_rho[1:,1:]*self.mask_rho[:-1,1:]* \ - 786 self.mask_rho[1:,:-1]*self.mask_rho[:-1,:-1] -
787 -
788 - def _set_mask_rho(self, mask_rho): -
789 self.mask_rho = mask_rho -
790 - 791 x = property(lambda self: self.x_vert, None, None, 'Return x_vert') - 792 y = property(lambda self: self.y_vert, None, None, 'Return x_vert') - 793 mask = property(lambda self: self.mask_rho, _set_mask_rho, None, 'Return mask_rho') - 794 mask_u = property(_get_mask_u, None, None, 'Return mask_u') - 795 mask_v = property(_get_mask_v, None, None, 'Return mask_v') - 796 mask_psi = property(_get_mask_psi, None, None, 'Return mask_psi') - 797 - 798 -
799 -class CGrid_geo(CGrid): -
800 """ - 801 Curvilinear Arakawa C-grid defined in geographic coordinates - 802 - 803 For a geographic grid, a projection may be specified, or The default - 804 projection for will be defined by the matplotlib.toolkits.Basemap - 805 projection: - 806 - 807 proj = Basemap(projection='merc', resolution=None, lat_ts=0.0) - 808 - 809 For a geographic grid, the cell widths are determined by the great - 810 circle distances. Angles, however, are defined using the projected - 811 coordinates, so a projection that conserves angles must be used. This - 812 means typically either Mercator (projection='merc') or Lambert - 813 Conformal Conic (projection='lcc'). - 814 """ -
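A hedged sketch of building a small geographic grid with this class (the lon/lat box is arbitrary; Basemap and pyproj must be installed):

    import numpy as np
    from mpl_toolkits.basemap import Basemap
    proj = Basemap(projection='merc', resolution=None, lat_ts=0.0)
    lon_vert, lat_vert = np.meshgrid(np.linspace(-72.0, -63.0, 9),
                                     np.linspace(40.0, 47.0, 8))
    grd = CGrid_geo(lon_vert, lat_vert, proj)
    # grd.dx and grd.dy are great-circle cell widths in meters,
    # grd.f holds the Coriolis parameter at the rho points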
815 - def _calculate_metrics(self): -
816 # calculate metrics based on x and y grid - 817 super(CGrid_geo, self)._calculate_metrics() - 818 - 819 # optionally calculate dx and dy based on great circle distances - 820 # for more accurate cell sizes. - 821 if self.use_gcdist: - 822 geod = pyproj.Geod(ellps=self.ellipse) - 823 az_forward, az_back, dx = geod.inv(self.lon[:,1:], self.lat[:,1:], \ - 824 self.lon[:,:-1], self.lat[:,:-1]) - 825 self.dx = 0.5*(dx[1:,:]+dx[:-1,:]) - 826 self.pm = 1.0/self.dx - 827 az_forward, az_back, dy = geod.inv(self.lon[1:,:], self.lat[1:,:], \ - 828 self.lon[:-1,:], self.lat[:-1,:]) - 829 self.dy = 0.5*(dy[:,1:]+dy[:,:-1]) - 830 self.pn = 1.0/self.dy -
831 - 832 -
834 if isinstance(self.dy, np.ma.MaskedArray): - 835 self.dndx = np.ma.zeros(self.dy.shape, dtype='d') - 836 else: - 837 self.dndx = np.zeros(self.dy.shape, dtype='d') - 838 - 839 if isinstance(self.dx, np.ma.MaskedArray): - 840 self.dmde = np.ma.zeros(self.dx.shape, dtype='d') - 841 else: - 842 self.dmde = np.zeros(self.dx.shape, dtype='d') - 843 - 844 self.dndx[1:-1,1:-1] = 0.5*(self.dy[1:-1,2:] - self.dy[1:-1,:-2]) - 845 self.dmde[1:-1,1:-1] = 0.5*(self.dx[2:,1:-1] - self.dx[:-2,1:-1]) -
846 -
847 - def _calculate_angle_rho(self): -
848 if isinstance(self.lon, np.ma.MaskedArray) or \ - 849 isinstance(self.lat, np.ma.MaskedArray): - 850 self.angle_rho = np.ma.zeros(self.lon.shape, dtype='d') - 851 else: - 852 self.angle_rho = np.zeros(self.lon.shape, dtype='d') - 853 - 854 # calculate metrics based on x and y grid - 855 super(CGrid_geo, self)._calculate_angle_rho() - 856 - 857 # optionally calculate dx and dy based on great circle distances - 858 # for more accurate cell sizes. - 859 if self.use_gcdist: - 860 geod = pyproj.Geod(ellps=self.ellipse) - 861 az_forward, az_back, dx = geod.inv(self.lon[:,:-1], self.lat[:,:-1], \ - 862 self.lon[:,1:], self.lat[:,1:]) - 863 - 864 angle = 0.5 * (az_forward[1:,:] + az_forward[:-1,:]) - 865 self.angle_rho = (90 - angle) * np.pi/180. -
866 - 867 -
868 - def __init__(self, lon_vert, lat_vert, proj, use_gcdist=True, ellipse='WGS84', \ - 869 lon_rho=None, lat_rho=None, lon_u=None, lat_u=None, \ - 870 lon_v=None, lat_v=None, lon_psi=None, lat_psi=None, dx=None, dy=None, \ - 871 dndx=None, dmde=None, angle_rho=None): -
872 - 873 x, y = proj(lon_vert, lat_vert) - 874 self.lon_vert = lon_vert - 875 self.lat_vert = lat_vert - 876 self.proj = proj - 877 - 878 self.use_gcdist = use_gcdist - 879 self.ellipse = ellipse - 880 - 881 if lon_rho is None or lat_rho is None or lon_u is None or lat_u is None or \ - 882 lon_v is None or lat_v is None or lon_psi is None or lat_psi is None: - 883 - 884 super(CGrid_geo, self).__init__(x, y) - 885 - 886 self.lon_rho, self.lat_rho = self.proj(self.x_rho, self.y_rho, - 887 inverse=True) - 888 self.lon_u, self.lat_u = self.proj(self.x_u, self.y_u, inverse=True) - 889 self.lon_v, self.lat_v = self.proj(self.x_v, self.y_v, inverse=True) - 890 self.lon_psi, self.lat_psi = self.proj(self.x_psi, self.y_psi, - 891 inverse=True) - 892 else: - 893 self.lon_rho = lon_rho - 894 self.lat_rho = lat_rho - 895 self.lon_u = lon_u - 896 self.lat_u = lat_u - 897 self.lon_v = lon_v - 898 self.lat_v = lat_v - 899 self.lon_psi = lon_psi - 900 self.lat_psi = lat_psi - 901 #calculate cartesian position - 902 self.x_vert, self.y_vert = proj(lon_vert, lat_vert) - 903 self.x_rho, self.y_rho = proj(lon_rho, lat_rho) - 904 self.x_u, self.y_u = proj(lon_u, lat_u) - 905 self.x_v, self.y_v = proj(lon_v, lat_v) - 906 self.x_psi, self.y_psi = proj(lon_psi, lat_psi) - 907 - 908 if dx is None or dy is None: - 909 self._calculate_metrics() - 910 else: - 911 self.dx = dx - 912 self.dy = dy - 913 - 914 self.xl = np.maximum(self.dx[0,:].sum(), self.dx[-1,:].sum()) - 915 self.el = np.maximum(self.dy[:,0].sum(), self.dy[:,-1].sum()) - 916 - 917 if dndx is None or dmde is None: - 918 self._calculate_derivative_metrics() - 919 else: - 920 self.dndx = dndx - 921 self.dmde = dmde - 922 - 923 if angle_rho is None: - 924 self._calculate_angle_rho() - 925 else: - 926 self.angle_rho = angle_rho - 927 - 928 self.f = 2.0 * 7.29e-5 * np.sin(self.lat_rho * np.pi / 180.0) - 929 self.spherical = 'T' -
930 - 931 -
932 - def mask_polygon_geo(lonlat_verts, mask_value=0.0): -
933 lon, lat = zip(*lonlat_verts) - 934 x, y = proj(lon, lat, inverse=True) - 935 self.mask_polygon(zip(x, y), mask_value) -
936 - 937 lon = property(lambda self: self.lon_vert, None, None, 'Shorthand for lon_vert') - 938 lat = property(lambda self: self.lat_vert, None, None, 'Shorthand for lat_vert') -
939 - 940 - 941 -
942 -class Gridgen(CGrid): -
943 """ - 944 docstring for Gridgen - 945 """ - 946 - 947 -
948 - def generate_grid(self): -
949 - 950 if self._gn is not None: - 951 self._libgridgen.gridnodes_destroy(self._gn) - 952 - 953 nbry = len(self.xbry) - 954 - 955 nsigmas = ctypes.c_int(0) - 956 sigmas = ctypes.c_void_p(0) - 957 nrect = ctypes.c_int(0) - 958 xrect = ctypes.c_void_p(0) - 959 yrect = ctypes.c_void_p(0) - 960 - 961 if self.focus is None: - 962 ngrid = ctypes.c_int(0) - 963 xgrid = ctypes.POINTER(ctypes.c_double)() - 964 ygrid = ctypes.POINTER(ctypes.c_double)() - 965 else: - 966 y, x = np.mgrid[0:1:self.ny*1j, 0:1:self.nx*1j] - 967 xgrid, ygrid = self.focus(x, y) - 968 ngrid = ctypes.c_int(xgrid.size) - 969 xgrid = (ctypes.c_double * xgrid.size)(*xgrid.flatten()) - 970 ygrid = (ctypes.c_double * ygrid.size)(*ygrid.flatten()) - 971 - 972 self._gn = self._libgridgen.gridgen_generategrid2( - 973 ctypes.c_int(nbry), - 974 (ctypes.c_double * nbry)(*self.xbry), - 975 (ctypes.c_double * nbry)(*self.ybry), - 976 (ctypes.c_double * nbry)(*self.beta), - 977 ctypes.c_int(self.ul_idx), - 978 ctypes.c_int(self.nx), - 979 ctypes.c_int(self.ny), - 980 ngrid, - 981 xgrid, - 982 ygrid, - 983 ctypes.c_int(self.nnodes), - 984 ctypes.c_int(self.newton), - 985 ctypes.c_double(self.precision), - 986 ctypes.c_int(self.checksimplepoly), - 987 ctypes.c_int(self.thin), - 988 ctypes.c_int(self.nppe), - 989 ctypes.c_int(self.verbose), - 990 ctypes.byref(nsigmas), - 991 ctypes.byref(sigmas), - 992 ctypes.byref(nrect), - 993 ctypes.byref(xrect), - 994 ctypes.byref(yrect) ) - 995 - 996 x = self._libgridgen.gridnodes_getx(self._gn) - 997 x = np.asarray([x[0][i] for i in range(self.ny*self.nx)]) - 998 # x = np.asarray([x[j][i] for j in range(self.ny) for i in range(self.nx)]) - 999 x.shape = (self.ny, self.nx) -1000 -1001 y = self._libgridgen.gridnodes_gety(self._gn) -1002 y = np.asarray([y[0][i] for i in range(self.ny*self.nx)]) -1003 # y = np.asarray([y[j][i] for j in range(self.ny) for i in range(self.nx)]) -1004 y.shape = (self.ny, self.nx) -1005 -1006 if np.any(np.isnan(x)) or np.any(np.isnan(y)): -1007 x = np.ma.masked_where(np.isnan(x), x) -1008 y = np.ma.masked_where(np.isnan(y), y) -1009 -1010 # if self.proj is not None: -1011 # lon, lat = self.proj(x, y, inverse=True) -1012 # super(Gridgen, self).__init__(lon, lat, proj=self.proj) -1013 # else: -1014 super(Gridgen, self).__init__(x, y) -
1015 -1016 -1017 -
1018 - def __init__(self, xbry, ybry, beta, shape, ul_idx=0, \ -1019 focus=None, proj=None, \ -1020 nnodes=14, precision=1.0e-12, nppe=3, \ -1021 newton=True, thin=True, checksimplepoly=True, verbose=False): -
1022 -1023 #self._libgridgen = np.ctypeslib.load_library('libgridgen',__file__) -1024 self._libgridgen = np.ctypeslib.load_library('libgridgen', pyroms.__path__[0]) -1025 -1026 self._libgridgen.gridgen_generategrid2.restype = ctypes.c_void_p -1027 self._libgridgen.gridnodes_getx.restype = ctypes.POINTER(ctypes.POINTER(ctypes.c_double)) -1028 self._libgridgen.gridnodes_gety.restype = ctypes.POINTER(ctypes.POINTER(ctypes.c_double)) -1029 self._libgridgen.gridnodes_getnce1.restype = ctypes.c_int -1030 self._libgridgen.gridnodes_getnce2.restype = ctypes.c_int -1031 self._libgridgen.gridmap_build.restype = ctypes.c_void_p -1032 -1033 self.xbry = np.asarray(xbry, dtype='d') -1034 self.ybry = np.asarray(ybry, dtype='d') -1035 self.beta = np.asarray(beta, dtype='d') -1036 assert self.beta.sum() == 4.0, 'sum of beta must be 4.0' -1037 self.shape = shape -1038 self.ny = shape[0] -1039 self.nx = shape[1] -1040 self.ul_idx = ul_idx -1041 self.focus = focus -1042 self.nnodes = nnodes -1043 self.precision = precision -1044 self.nppe = nppe -1045 self.newton = newton -1046 self.thin = thin -1047 self.checksimplepoly = checksimplepoly -1048 self.verbose = verbose -1049 -1050 self.proj = proj -1051 if self.proj is not None: -1052 self.xbry, self.ybry = proj(self.xbry, self.ybry) -1053 -1054 self._gn = None -1055 self.generate_grid() -
1056 -
1057 - def __del__(self): -
1058 """delete gridnode object upon deletion""" -1059 self._libgridgen.gridnodes_destroy(self._gn) -
1060 -1061 -
1062 -def rho_to_vert(xr, yr, pm, pn, ang): -
1063 Mp, Lp = xr.shape -1064 x = np.empty((Mp+1, Lp+1), dtype='d') -1065 y = np.empty((Mp+1, Lp+1), dtype='d') -1066 x[1:-1, 1:-1] = 0.25*(xr[1:,1:]+xr[1:,:-1]+xr[:-1,1:]+xr[:-1,:-1]) -1067 y[1:-1, 1:-1] = 0.25*(yr[1:,1:]+yr[1:,:-1]+yr[:-1,1:]+yr[:-1,:-1]) -1068 -1069 # east side -1070 theta = 0.5*(ang[:-1,-1]+ang[1:,-1]) -1071 dx = 0.5*(1.0/pm[:-1,-1]+1.0/pm[1:,-1]) -1072 dy = 0.5*(1.0/pn[:-1,-1]+1.0/pn[1:,-1]) -1073 x[1:-1,-1] = x[1:-1,-2] + dx*np.cos(theta) -1074 y[1:-1,-1] = y[1:-1,-2] + dx*np.sin(theta) -1075 -1076 # west side -1077 theta = 0.5*(ang[:-1,0]+ang[1:,0]) -1078 dx = 0.5*(1.0/pm[:-1,0]+1.0/pm[1:,0]) -1079 dy = 0.5*(1.0/pn[:-1,0]+1.0/pn[1:,0]) -1080 x[1:-1,0] = x[1:-1,1] - dx*np.cos(theta) -1081 y[1:-1,0] = y[1:-1,1] - dx*np.sin(theta) -1082 -1083 # north side -1084 theta = 0.5*(ang[-1,:-1]+ang[-1,1:]) -1085 dx = 0.5*(1.0/pm[-1,:-1]+1.0/pm[-1,1:]) -1086 dy = 0.5*(1.0/pn[-1,:-1]+1.0/pn[-1,1:]) -1087 x[-1,1:-1] = x[-2,1:-1] - dy*np.sin(theta) -1088 y[-1,1:-1] = y[-2,1:-1] + dy*np.cos(theta) -1089 -1090 # here we are now going to the south side.. -1091 theta = 0.5*(ang[0,:-1]+ang[0,1:]) -1092 dx = 0.5*(1.0/pm[0,:-1]+1.0/pm[0,1:]) -1093 dy = 0.5*(1.0/pn[0,:-1]+1.0/pn[0,1:]) -1094 x[0,1:-1] = x[1,1:-1] + dy*np.sin(theta) -1095 y[0,1:-1] = y[1,1:-1] - dy*np.cos(theta) -1096 -1097 #Corners -1098 x[0,0] = 4.0*xr[0,0]-x[1,0]-x[0,1]-x[1,1] -1099 x[-1,0] = 4.0*xr[-1,0]-x[-2,0]-x[-1,1]-x[-2,1] -1100 x[0,-1] = 4.0*xr[0,-1]-x[0,-2]-x[1,-1]-x[1,-2] -1101 x[-1,-1] = 4.0*xr[-1,-1]-x[-2,-2]-x[-2,-1]-x[-1,-2] -1102 -1103 y[0,0] = 4.0*yr[0,0]-y[1,0]-y[0,1]-y[1,1] -1104 y[-1,0] = 4.0*yr[-1,0]-y[-2,0]-y[-1,1]-y[-2,1] -1105 y[0,-1] = 4.0*yr[0,-1]-y[0,-2]-y[1,-1]-y[1,-2] -1106 y[-1,-1] = 4.0*yr[-1,-1]-y[-2,-2]-y[-2,-1]-y[-1,-2] -1107 -1108 return x, y -
1109 -1110 -
1111 -def rho_to_vert_geo(lonr, latr, lonp, latp): -
1112 Mm, Lm = lonr.shape -1113 lon = np.zeros((Mm+1,Lm+1)) -1114 lat = np.zeros((Mm+1,Lm+1)) -1115 -1116 lon[1:-1, 1:-1] = lonp[:,:] -1117 lat[1:-1, 1:-1] = latp[:,:] -1118 -1119 #North edge -1120 lon[Mm,0:-2] = lonr[Mm-1,0:-1] - ( lonp[Mm-2,:] - lonr[Mm-1,0:-1] ) -1121 lon[Mm,-2:] = lonr[Mm-1,-2:] - ( lonp[Mm-2,-2:] - lonr[Mm-1,-2:] ) -1122 lat[Mm,0:-2] = latr[Mm-1,0:-1] - ( latp[Mm-2,:] - latr[Mm-1,0:-1] ) -1123 lat[Mm,-2:] = latr[Mm-1,-2:] - ( latp[Mm-2,-2:] - latr[Mm-1,-2:] ) -1124 -1125 #South edge -1126 lon[0,0:-2] = lonr[0,0:-1] - ( lonp[0,:] - lonr[0,0:-1] ) -1127 lon[0,-2:] = lonr[0,-2:] - ( lonp[0,-2:] - lonr[0,-2:] ) -1128 lat[0,0:-2] = latr[0,0:-1] - ( latp[0,:] - latr[0,0:-1] ) -1129 lat[0,-2:] = latr[0,-2:] - ( latp[0,-2:] - latr[0,-2:] ) -1130 -1131 #East edge -1132 lon[0:-2,Lm] = lonr[0:-1,Lm-1] - ( lonp[:,Lm-2] - lonr[0:-1,Lm-1] ) -1133 lon[-2:,Lm] = lonr[-2:,Lm-1] - ( lonp[-2:,Lm-2] - lonr[-2:,Lm-1] ) -1134 lat[0:-2,Lm] = latr[0:-1,Lm-1] - ( latp[:,Lm-2] - latr[0:-1,Lm-1] ) -1135 lat[-2:,Lm] = latr[-2:,Lm-1] - ( latp[-2:,Lm-2] - latr[-2:,Lm-1] ) -1136 -1137 #West edge -1138 lon[0:-2,0] = lonr[0:-1,0] - ( lonp[:,0] - lonr[0:-1,0] ) -1139 lon[-2:,0] = lonr[-2:,0] - ( lonp[-2:,0] - lonr[-2:,0] ) -1140 lat[0:-2,0] = latr[0:-1,0] - ( latp[:,0] - latr[0:-1,0] ) -1141 lat[-2:,0] = latr[-2:,0] - ( latp[-2:,0] - latr[-2:,0] ) -1142 -1143 return lon, lat -
1144 -1145 -
1146 -class edit_mask_mesh(object): -
1147 """ -1148 Interactive mask editor -1149 -1150 edit_mask_mesh(grd, proj) -1151 -1152 Edit grd mask. Mask/Unsmask cell by a simple click on the cell. -1153 Mask modification are store in mask_change.txt for further use. -1154 -1155 Key commands: -1156 e : toggle between Editing/Viewing mode -1157 """ -1158 -
1159 - def _on_key(self, event): -
1160 if event.key == 'e': -1161 self._clicking = not self._clicking -1162 plt.title('Editing %s -- click "e" to toggle' % self._clicking) -1163 plt.draw() -
1164 -
1165 - def _on_click(self, event): -
1166 x, y = event.xdata, event.ydata -1167 if event.button==1 and event.inaxes is not None and self._clicking == True: -1168 d = (x-self._xc)**2 + (y-self._yc)**2 -1169 if isinstance(self.xv, np.ma.MaskedArray): -1170 idx = np.argwhere(d[~self._xc.mask] == d.min()) -1171 else: -1172 idx = np.argwhere(d.flatten() == d.min()) -1173 self._mask[idx] = float(not self._mask[idx]) -1174 i, j = np.argwhere(d == d.min())[0] -1175 self.mask[i, j] = float(not self.mask[i, j]) -1176 #open output file -1177 f = open('mask_change.txt','a') -1178 value = (i, j, self.mask[i, j]) -1179 s = str(value) -1180 f.write(s + '\n') -1181 #close file -1182 f.close() -1183 self._pc.set_array(self._mask) -1184 self._pc.changed() -1185 plt.draw() -
1186 -
1187 - def __init__(self, grd, proj=None, **kwargs): -
1188 -1189 if type(grd).__name__ == 'ROMS_Grid': -1190 try: -1191 xv = grd.hgrid.lon_vert -1192 yv = grd.hgrid.lat_vert -1193 mask = grd.hgrid.mask_rho -1194 except: -1195 xv = grd.hgrid.x_vert -1196 yv = grd.hgrid.y_vert -1197 mask = grd.hgrid.mask_rho -1198 -1199 if type(grd).__name__ == 'CGrid_geo': -1200 try: -1201 xv = grd.lon_vert -1202 yv = grd.lat_vert -1203 mask = grd.mask_rho -1204 except: -1205 xv = grd.x_vert -1206 yv = grd.y_vert -1207 mask = grd.mask_rho -1208 -1209 assert xv.shape == yv.shape, 'xv and yv must have the same shape' -1210 for dx, dq in zip(xv.shape, mask.shape): -1211 assert dx==dq+1, \ -1212 '''xv and yv must be cell verticies -1213 (i.e., one cell bigger in each dimension)''' -1214 -1215 self.xv = xv -1216 self.yv = yv -1217 -1218 self.mask = mask -1219 -1220 self.proj = proj -1221 -1222 land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) -1223 sea_color = kwargs.pop('sea_color', (0.6, 0.6, 1.0)) -1224 -1225 cm = plt.matplotlib.colors.ListedColormap([land_color, sea_color], -1226 name='land/sea') -1227 -1228 if self.proj is None: -1229 self._pc = plt.pcolor(xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1230 else: -1231 xv, yv = self.proj(xv, yv) -1232 self._pc = Basemap.pcolor(self.proj, xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1233 self.proj.drawcoastlines() -1234 -1235 self._xc = 0.25*(xv[1:,1:]+xv[1:,:-1]+xv[:-1,1:]+xv[:-1,:-1]) -1236 self._yc = 0.25*(yv[1:,1:]+yv[1:,:-1]+yv[:-1,1:]+yv[:-1,:-1]) -1237 -1238 if isinstance(self.xv, np.ma.MaskedArray): -1239 self._mask = mask[~self._xc.mask] -1240 else: -1241 self._mask = mask.flatten() -1242 -1243 plt.connect('button_press_event', self._on_click) -1244 plt.connect('key_press_event', self._on_key) -1245 self._clicking = False -1246 plt.title('Editing %s -- click "e" to toggle' % self._clicking) -1247 plt.draw() -
1248 -1249 -
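A hypothetical interactive session with edit_mask_mesh (it reuses the kind of CGrid_geo grid sketched earlier and needs a matplotlib backend that opens a window):

    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.basemap import Basemap
    proj = Basemap(projection='merc', resolution=None, lat_ts=0.0)
    lon_v, lat_v = np.meshgrid(np.linspace(-72.0, -63.0, 9), np.linspace(40.0, 47.0, 8))
    grd = CGrid_geo(lon_v, lat_v, proj)
    editor = edit_mask_mesh(grd, proj=None)   # proj=None draws directly in lon/lat
    plt.show()   # press "e" to enter editing mode, then click cells to flip land/sea
    # each change is appended to mask_change.txt as "(i, j, new_value)"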
1250 -def uvp_masks(rmask): -
1251 ''' -1252 return u-, v-, and psi-masks based on input rho-mask -1253 -1254 Parameters -1255 ---------- -1256 -1257 rmask : ndarray -1258 mask at CGrid rho-points -1259 -1260 Returns -1261 ------- -1262 (umask, vmask, pmask) : ndarrays -1263 masks at u-, v-, and psi-points -1264 ''' -1265 rmask = np.asarray(rmask) -1266 assert rmask.ndim == 2, 'rmask must be a 2D array' -1267 assert np.all((rmask==0)|(rmask==1)), 'rmask array must contain only ones and zeros.' -1268 -1269 umask = rmask[:, :-1] * rmask[:, 1:] -1270 vmask = rmask[:-1, :] * rmask[1:, :] -1271 pmask = rmask[:-1, :-1] * rmask[:-1, 1:] * rmask[1:, :-1] * rmask[1:, 1:] -1272 -1273 return umask, vmask, pmask -
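For example, on a made-up 4x5 rho mask:

    import numpy as np
    rmask = np.ones((4, 5))
    rmask[:2, :2] = 0.0                      # a small patch of land
    umask, vmask, pmask = uvp_masks(rmask)
    # shapes: umask (4, 4), vmask (3, 5), pmask (3, 4);
    # a u, v or psi point is water only if all adjacent rho points are water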
1274 -1275 -1276 -1277 if __name__ == '__main__': -1278 geographic = False -1279 if geographic: -1280 from mpl_toolkits.basemap import Basemap -1281 proj = Basemap(projection='lcc', -1282 resolution='i', -1283 llcrnrlon=-72.0, -1284 llcrnrlat= 40.0, -1285 urcrnrlon=-63.0, -1286 urcrnrlat=47.0, -1287 lat_0=43.0, -1288 lon_0=-62.5) -1289 -1290 lon = (-71.977385177601761, -70.19173825913137, -1291 -63.045075098584945,-64.70104074097425) -1292 lat = (42.88215610827428, 41.056141745853786, -1293 44.456701607935841, 46.271758064353897) -1294 beta = [1.0, 1.0, 1.0, 1.0] -1295 -1296 grd = Gridgen(lon, lat, beta, (32, 32), proj=proj) -1297 -1298 for seg in proj.coastsegs: -1299 grd.mask_polygon(seg) -1300 -1301 plt.pcolor(grd.x, grd.y, grd.mask) -1302 plt.show() -1303 else: -1304 x = [0.2, 0.85, 0.9, 0.82, 0.23] -1305 y = [0.2, 0.25, 0.5, 0.82, .83] -1306 beta = [1.0, 1.0, 0.0, 1.0, 1.0] -1307 -1308 grd = Gridgen(x, y, beta, (32, 32)) -1309 -1310 ax = plt.subplot(111) -1311 BoundaryInteractor(x, y, beta) -1312 plt.show() -1313 -1314 -1315 -
1316 -class get_position_from_map(object): -
1317 """ -1318 Get cell index position Interactively -1319 -1320 get_position_from_map(grd, proj) -1321 -1322 Get index i, j as well as lon, lat coordinates for one cell -1323 simply by clicking on the cell. -1324 -1325 Key commands: -1326 i : toggle between Interactive/Viewing mode -1327 """ -
1328 - def _on_key(self, event): -
1329 if event.key == 'i': -1330 self._clicking = not self._clicking -1331 plt.title('Interactive %s -- click "i" to toggle' % self._clicking) -1332 plt.draw() -
1333 -
1334 - def _on_click(self, event): -
1335 x, y = event.xdata, event.ydata -1336 if event.button==1 and event.inaxes is not None and self._clicking == True: -1337 d = (x-self._xc)**2 + (y-self._yc)**2 -1338 if isinstance(self.xv, np.ma.MaskedArray): -1339 idx = np.argwhere(d[~self._xc.mask] == d.min()) -1340 else: -1341 idx = np.argwhere(d.flatten() == d.min()) -1342 j, i = np.argwhere(d == d.min())[0] -1343 print 'Position on the grid (rho point): i =', i, ', j =', j -1344 if self.proj is not None: -1345 lon, lat = self.proj(self._xc[j,i], self._yc[j,i], inverse=True) -1346 print 'corresponding geographical position : lon = ', lon, ', lat =', lat -1347 else: -1348 print 'corresponding cartesian position : x = ', self._xc[j,i], ', y =', self._yc[j,i] -
1349 -
1350 - def __init__(self, grd, proj=None, **kwargs): -
1351 -1352 try: -1353 xv = grd.hgrid.lon_vert -1354 yv = grd.hgrid.lat_vert -1355 mask = grd.hgrid.mask_rho -1356 except: -1357 xv = grd.hgrid.x_vert -1358 yv = grd.hgrid.y_vert -1359 mask = grd.hgrid.mask_rho -1360 -1361 assert xv.shape == yv.shape, 'xv and yv must have the same shape' -1362 for dx, dq in zip(xv.shape, mask.shape): -1363 assert dx==dq+1, \ -1364 '''xv and yv must be cell verticies -1365 (i.e., one cell bigger in each dimension)''' -1366 -1367 self.xv = xv -1368 self.yv = yv -1369 -1370 self.mask = mask -1371 -1372 self.proj = proj -1373 -1374 land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) -1375 sea_color = kwargs.pop('sea_color', (0.6, 0.6, 1.0)) -1376 -1377 cm = plt.matplotlib.colors.ListedColormap([land_color, sea_color], -1378 name='land/sea') -1379 -1380 if self.proj is None: -1381 self._pc = plt.pcolor(xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1382 -1383 else: -1384 xv, yv = self.proj(xv, yv) -1385 self._pc = Basemap.pcolor(self.proj, xv, yv, mask, cmap=cm, vmin=0, vmax=1, edgecolor='k', **kwargs) -1386 self.proj.drawcoastlines() -1387 -1388 self._xc = 0.25*(xv[1:,1:]+xv[1:,:-1]+xv[:-1,1:]+xv[:-1,:-1]) -1389 self._yc = 0.25*(yv[1:,1:]+yv[1:,:-1]+yv[:-1,1:]+yv[:-1,:-1]) -1390 -1391 plt.connect('button_press_event', self._on_click) -1392 plt.connect('key_press_event', self._on_key) -1393 self._clicking = False -1394 plt.title('Interactive %s -- click "i" to toggle' % self._clicking) -1395 plt.draw() -
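A hypothetical session with get_position_from_map; the get_ROMS_grid call and the 'MY_GRID' entry in gridid.txt are assumptions, not something this module provides:

    import pyroms
    import matplotlib.pyplot as plt
    grd = pyroms.grid.get_ROMS_grid('MY_GRID')   # assumed gridid.txt entry
    pos = get_position_from_map(grd)
    plt.show()   # press "i", then click a cell to print its (i, j) and lon/lat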
1396 -1397 -1398 if __name__ == '__main__': -1399 geographic = False -1400 if geographic: -1401 from mpl_toolkits.basemap import Basemap -1402 proj = Basemap(projection='lcc', -1403 resolution='i', -1404 llcrnrlon=-72.0, -1405 llcrnrlat= 40.0, -1406 urcrnrlon=-63.0, -1407 urcrnrlat=47.0, -1408 lat_0=43.0, -1409 lon_0=-62.5) -1410 -1411 lon = (-71.977385177601761, -70.19173825913137, -1412 -63.045075098584945,-64.70104074097425) -1413 lat = (42.88215610827428, 41.056141745853786, -1414 44.456701607935841, 46.271758064353897) -1415 beta = [1.0, 1.0, 1.0, 1.0] -1416 -1417 grd = Gridgen(lon, lat, beta, (32, 32), proj=proj) -1418 -1419 for seg in proj.coastsegs: -1420 grd.mask_polygon(seg) -1421 -1422 plt.pcolor(grd.x, grd.y, grd.mask) -1423 plt.show() -1424 else: -1425 x = [0.2, 0.85, 0.9, 0.82, 0.23] -1426 y = [0.2, 0.25, 0.5, 0.82, .83] -1427 beta = [1.0, 1.0, 0.0, 1.0, 1.0] -1428 -1429 grd = Gridgen(x, y, beta, (32, 32)) -1430 -1431 ax = plt.subplot(111) -1432 BoundaryInteractor(x, y, beta) -1433 plt.show() -1434 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.BoundaryInteractor-class.html b/pyroms/docs/pyroms.hgrid_old.BoundaryInteractor-class.html deleted file mode 100644 index 198cb74..0000000 --- a/pyroms/docs/pyroms.hgrid_old.BoundaryInteractor-class.html +++ /dev/null @@ -1,674 +0,0 @@ - - - - - pyroms.hgrid_old.BoundaryInteractor - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class BoundaryInteractor - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class BoundaryInteractor

source code

-
-object --+
-         |
-        BoundaryInteractor
-
- -
-
-
-Interactive grid creation
-    
-bry = BoundaryInteractor(x=[], y=[], beta=None, ax=gca(), **gridgen_options)
-
-The initial boundary polygon points (x and y) are
-counterclockwise, starting in the upper left corner of the
-boundary. 
-
-Key commands:
-    
-    t : toggle visibility of vertices
-    d : delete a vertex
-    i : insert a vertex at a point on the polygon line
-    
-    p : set vertex as beta=1 (a Positive turn, marked with green triangle)
-    m : set vertex as beta=-1 (a Negative turn, marked with red triangle)
-    z : set vertex as beta=0 (no corner, marked with a black dot)
-    
-    G : generate grid from the current boundary using gridgen
-    T : toggle visibility of the current grid
-
-Methods:
-
-    bry.dump(bry_file)
-        Write the current boundary information (bry.x, bry.y, bry.beta) to
-        a cPickle file bry_file.
-    
-    bry.load(bry_file)
-        Read in boundary information (x, y, beta) from the cPickle file
-        bry_file.
-    
-    bry.remove_grid()  
-        Remove gridlines from axes.
-
-Attributes:
-    bry.x : the X boundary points
-    bry.y : the Y boundary points
-    bry.verts : the vertices of the grid
-    bry.grd : the CGrid object
-    
-
-
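A minimal sketch of driving BoundaryInteractor (the boundary points are arbitrary, and the gridgen library must be built for the "G" command to work):

    import matplotlib.pyplot as plt
    # rough boundary, counterclockwise from the upper left corner; the betas must sum to 4
    x = [0.2, 0.85, 0.9, 0.82, 0.23]
    y = [0.2, 0.25, 0.5, 0.82, 0.83]
    beta = [1.0, 1.0, 0.0, 1.0, 1.0]
    bry = BoundaryInteractor(x, y, beta)
    plt.show()   # edit the boundary, press "G" to run gridgen, "T" to toggle the grid
    # afterwards bry.grd holds the generated CGrid and bry.save_bry() pickles the boundary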
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
_update_beta_lines(self)
- Update m/pline by finding the points where self.beta== -/+ 1
- source code - -
- -
-   - - - - - - -
remove_grid(self)
- Remove a generated grid from the BoundaryClick figure
- source code - -
- -
-   - - - - - - -
_draw_callback(self, - event) - source code - -
- -
-   - - - - - - -
_poly_changed(self, - poly)
- this method is called whenever the polygon object is called
- source code - -
- -
-   - - - - - - -
_get_ind_under_point(self, - event)
- get the index of the vertex under point if within epsilon tolerance
- source code - -
- -
-   - - - - - - -
_button_press_callback(self, - event)
- whenever a mouse button is pressed
- source code - -
- -
-   - - - - - - -
_button_release_callback(self, - event)
- whenever a mouse button is released
- source code - -
- -
-   - - - - - - -
_key_press_callback(self, - event)
- whenever a key is pressed
- source code - -
- -
-   - - - - - - -
_motion_notify_callback(self, - event)
- on mouse movement
- source code - -
- -
-   - - - - - - -
__init__(self, - x, - y=None, - beta=None, - ax=None, - proj=None, - **gridgen_options)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
save_bry(self, - bry_file='bry.pickle') - source code - -
- -
-   - - - - - - -
load_bry(self, - bry_file='bry.pickle') - source code - -
- -
-   - - - - - - -
save_grid(self, - grid_file='grid.pickle') - source code - -
- -
-   - - - - - - -
_get_verts(self) - source code - -
- -
-   - - - - - - -
get_xdata(self) - source code - -
- -
-   - - - - - - -
get_ydata(self) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - - - - - - - - - - -
- - - - - -
Class Variables[hide private]
-
-   - - _showverts = True -
-   - - _showbetas = True -
-   - - _showgrid = True -
-   - - _epsilon = 5 -
- - - - - - - - - - - - - - - - - - -
- - - - - -
Properties[hide private]
-
-   - - verts -
-   - - x -
-   - - y -
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - x, - y=None, - beta=None, - ax=None, - proj=None, - **gridgen_options) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - -
- - - - - -
Property Details[hide private]
-
- -
- -
-

verts

- -
-
Get Method:
-
_get_verts(self) -
-
-
-
- -
- -
-

x

- -
-
Get Method:
-
get_xdata(self) -
-
-
-
- -
- -
-

y

- -
-
Get Method:
-
get_ydata(self) -
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.CGrid-class.html b/pyroms/docs/pyroms.hgrid_old.CGrid-class.html deleted file mode 100644 index 2536121..0000000 --- a/pyroms/docs/pyroms.hgrid_old.CGrid-class.html +++ /dev/null @@ -1,693 +0,0 @@ - - - - - pyroms.hgrid_old.CGrid - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class CGrid - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class CGrid

source code

-
-object --+
-         |
-        CGrid
-
- -
Known Subclasses:
-
- -
- -
-
-
-Curvilinear Arakawa C-Grid
- 
-The basis for the CGrid class is a pair of arrays defining the vertices of the
-grid in Cartesian coordinates (for geographic coordinates, see CGrid_geo). An optional
-mask may be defined on the cell centers. Other Arakawa C-grid properties,
-such as the locations of the cell centers (rho-points), cell edges (u and
-v velocity points), cell widths (dx and dy) and other metrics (angle,
-dmde, and dndx) are all calculated internally from the vertex points.
- 
-Input vertex arrays may be either type np.array or np.ma.MaskedArray. If
-masked arrays are used, the mask will be a combination of the specified
-mask (if given) and the masked locations.
- 
-EXAMPLES:
---------
- 
->>> x, y = mgrid[0.0:7.0, 0.0:8.0]
->>> x = np.ma.masked_where( (x<3) & (y<3), x)
->>> y = np.ma.MaskedArray(y, x.mask)
->>> grd = pyroms.grid.CGrid(x, y)
->>> print grd.x_rho
-[[-- -- -- 0.5 0.5 0.5 0.5]
- [-- -- -- 1.5 1.5 1.5 1.5]
- [-- -- -- 2.5 2.5 2.5 2.5]
- [3.5 3.5 3.5 3.5 3.5 3.5 3.5]
- [4.5 4.5 4.5 4.5 4.5 4.5 4.5]
- [5.5 5.5 5.5 5.5 5.5 5.5 5.5]]
->>> print grd.mask
-[[ 0.  0.  0.  1.  1.  1.  1.]
- [ 0.  0.  0.  1.  1.  1.  1.]
- [ 0.  0.  0.  1.  1.  1.  1.]
- [ 1.  1.  1.  1.  1.  1.  1.]
- [ 1.  1.  1.  1.  1.  1.  1.]
- [ 1.  1.  1.  1.  1.  1.  1.]]
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - x_vert, - y_vert, - x_rho=None, - y_rho=None, - x_u=None, - y_u=None, - x_v=None, - y_v=None, - x_psi=None, - y_psi=None, - dx=None, - dy=None, - dndx=None, - dmde=None, - angle_rho=None)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
_calculate_subgrids(self) - source code - -
- -
-   - - - - - - -
_calculate_metrics(self)
- Calculates pm, pn, dndx, dmde from x_vert and y_vert
- source code - -
- -
-   - - - - - - -
_calculate_derivative_metrics(self) - source code - -
- -
-   - - - - - - -
_calculate_angle(self) - source code - -
- -
-   - - - - - - -
_calculate_angle_rho(self) - source code - -
- -
-   - - - - - - -
calculate_orthogonality(self)
- Calculate orthogonality error in radians
- source code - -
- -
-   - - - - - - -
mask_polygon(self, - polyverts, - mask_value=0.0)
- Mask Cartesian points contained within the polygon defined by polyverts
- source code - -
- -
-   - - - - - - -
_get_mask_u(self) - source code - -
- -
-   - - - - - - -
_get_mask_v(self) - source code - -
- -
-   - - - - - - -
_get_mask_psi(self) - source code - -
- -
-   - - - - - - -
_set_mask_rho(self, - mask_rho) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Properties[hide private]
-
-   - - x
- Return x_vert -
-   - - y
- Return x_vert -
-   - - mask
- Return mask_rho -
-   - - mask_u
- Return mask_u -
-   - - mask_v
- Return mask_v -
-   - - mask_psi
- Return mask_psi -
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - x_vert, - y_vert, - x_rho=None, - y_rho=None, - x_u=None, - y_u=None, - x_v=None, - y_v=None, - x_psi=None, - y_psi=None, - dx=None, - dy=None, - dndx=None, - dmde=None, - angle_rho=None) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
- -
- -
- - -
-

mask_polygon(self, - polyverts, - mask_value=0.0) -

-
source code  -
- -
-
-Mask Cartesian points contained within the polygon defined by polyverts
-
-A cell is masked if the cell center (x_rho, y_rho) is within the
-polygon. Other sub-masks (mask_u, mask_v, and mask_psi) are updated
-automatically.
-
-mask_value [=0.0] may be specified to alter the value of the mask set
-within the polygon.  E.g., mask_value=1 for water points.
-
-
-
-
-
-
-
- - - - - - -
- - - - - -
Property Details[hide private]
-
- -
- -
-

x

-
-Return x_vert
-
-
-
-
Get Method:
-
unreachable(self) -
-
-
-
- -
- -
-

y

-
-Return x_vert
-
-
-
-
Get Method:
-
unreachable(self) -
-
-
-
- -
- -
-

mask

-
-Return mask_rho
-
-
-
-
Get Method:
-
unreachable(self) -
-
Set Method:
-
_set_mask_rho(self, - mask_rho) -
-
-
-
- -
- -
-

mask_u

-
-Return mask_u
-
-
-
-
Get Method:
-
_get_mask_u(self) -
-
-
-
- -
- -
-

mask_v

-
-Return mask_v
-
-
-
-
Get Method:
-
_get_mask_v(self) -
-
-
-
- -
- -
-

mask_psi

-
-Return mask_psi
-
-
-
-
Get Method:
-
_get_mask_psi(self) -
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.CGrid_geo-class.html b/pyroms/docs/pyroms.hgrid_old.CGrid_geo-class.html deleted file mode 100644 index d56fea2..0000000 --- a/pyroms/docs/pyroms.hgrid_old.CGrid_geo-class.html +++ /dev/null @@ -1,512 +0,0 @@ - - - - - pyroms.hgrid_old.CGrid_geo - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class CGrid_geo - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class CGrid_geo

source code

-
-object --+    
-         |    
-     CGrid --+
-             |
-            CGrid_geo
-
- -
-
-
-Curvilinear Arakawa C-grid defined in geographic coordinates
-
-For a geographic grid, a projection may be specified; otherwise the default
-projection will be defined by the matplotlib.toolkits.Basemap
-projection:
-
-proj = Basemap(projection='merc', resolution=None, lat_ts=0.0)
-
-For a geographic grid, the cell widths are determined by the great
-circle distances. Angles, however, are defined using the projected
-coordinates, so a projection that conserves angles must be used. This
-means typically either Mercator (projection='merc') or Lambert
-Conformal Conic (projection='lcc').
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
_calculate_metrics(self)
- Calculates pm, pn, dndx, dmde from x_vert and y_vert
- source code - -
- -
-   - - - - - - -
_calculate_derivative_metrics(self) - source code - -
- -
-   - - - - - - -
_calculate_angle_rho(self) - source code - -
- -
-   - - - - - - -
__init__(self, - lon_vert, - lat_vert, - proj, - use_gcdist=True, - ellipse='WGS84', - lon_rho=None, - lat_rho=None, - lon_u=None, - lat_u=None, - lon_v=None, - lat_v=None, - lon_psi=None, - lat_psi=None, - dx=None, - dy=None, - dndx=None, - dmde=None, - angle_rho=None)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
mask_polygon_geo(lonlat_verts, - mask_value=0.0) - source code - -
- -
-

Inherited from CGrid: - calculate_orthogonality, - mask_polygon -

- -

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - - - - - - - -
- - - - - -
Properties[hide private]
-
-   - - lon
- Shorthand for lon_vert -
-   - - lat
- Shorthand for lat_vert -
-

Inherited from CGrid: - mask, - mask_psi, - mask_u, - mask_v, - x, - y -

-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

_calculate_metrics(self) -

-
source code  -
- -
-Calculates pm, pn, dndx, dmde from x_vert and y_vert
-
-
-
-
Overrides: - CGrid._calculate_metrics -
(inherited documentation)
- -
-
-
- -
- -
- - -
-

_calculate_derivative_metrics(self) -

-
source code  -
- - -
-
Overrides: - CGrid._calculate_derivative_metrics -
-
-
-
- -
- -
- - -
-

_calculate_angle_rho(self) -

-
source code  -
- - -
-
Overrides: - CGrid._calculate_angle_rho -
-
-
-
- -
- -
- - -
-

__init__(self, - lon_vert, - lat_vert, - proj, - use_gcdist=True, - ellipse='WGS84', - lon_rho=None, - lat_rho=None, - lon_u=None, - lat_u=None, - lon_v=None, - lat_v=None, - lon_psi=None, - lat_psi=None, - dx=None, - dy=None, - dndx=None, - dmde=None, - angle_rho=None) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - -
- - - - - -
Property Details[hide private]
-
- -
- -
-

lon

-
-Shorthand for lon_vert
-
-
-
-
Get Method:
-
unreachable(self) -
-
-
-
- -
- -
-

lat

-
-Shorthand for lat_vert
-
-
-
-
Get Method:
-
unreachable(self) -
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.Focus-class.html b/pyroms/docs/pyroms.hgrid_old.Focus-class.html deleted file mode 100644 index 142d4d5..0000000 --- a/pyroms/docs/pyroms.hgrid_old.Focus-class.html +++ /dev/null @@ -1,335 +0,0 @@ - - - - - pyroms.hgrid_old.Focus - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class Focus - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class Focus

source code

-
-object --+
-         |
-        Focus
-
- -
-
-
-Return a container for a sequence of Focus objects
-
-foc = Focus()
-
-The sequence is populated by using the 'add_focus_x' and 'add_focus_y'
-methods. These methods define a point ('xo' or 'yo'), around which to
-focus, a focusing factor of 'focus', and x and y extent of focusing given
-by Rx or Ry. The region of focusing will be approximately Gaussian, and the
-resolution will be increased by approximately the value of factor.
-
-Methods
--------
-foc.add_focus_x(xo, factor=2.0, Rx=0.1)
-foc.add_focus_y(yo, factor=2.0, Ry=0.1)
-
-Calls to the object return transformed coordinates:
-    xf, yf = foc(x, y)
-where x and y must be within [0, 1], and are typically a uniform,
-normalized grid. The focused grid will be the result of applying each of
-the focus elements in the sequence they are added to the series.
-
-
-EXAMPLES
---------
-
->>> foc = pyroms.grid.Focus()
->>> foc.add_focus_x(0.2, factor=3.0, Rx=0.2)
->>> foc.add_focus_y(0.6, factor=5.0, Ry=0.35)
-
->>> x, y = np.mgrid[0:1:3j,0:1:3j]
->>> xf, yf = foc(x, y)
-
->>> print xf
-[[ 0.          0.          0.        ]
- [ 0.36594617  0.36594617  0.36594617]
- [ 1.          1.          1.        ]]
->>> print yf
-[[ 0.          0.62479833  1.        ]
- [ 0.          0.62479833  1.        ]
- [ 0.          0.62479833  1.        ]]
-
-
- - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
add_focus_x(self, - xo, - factor=2.0, - Rx=0.1)
- docstring for add_point
- source code - -
- -
-   - - - - - - -
add_focus_y(self, - yo, - factor=2.0, - Ry=0.1)
- docstring for add_point
- source code - -
- -
-   - - - - - - -
__call__(self, - x, - y)
- docstring for __call__
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.Gridgen-class.html b/pyroms/docs/pyroms.hgrid_old.Gridgen-class.html deleted file mode 100644 index 009ebb7..0000000 --- a/pyroms/docs/pyroms.hgrid_old.Gridgen-class.html +++ /dev/null @@ -1,323 +0,0 @@ - - - - - pyroms.hgrid_old.Gridgen - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class Gridgen - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class Gridgen

source code

-
-object --+    
-         |    
-     CGrid --+
-             |
-            Gridgen
-
- -
-
-
-docstring for Gridgen
-
-
- - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
generate_grid(self) - source code - -
- -
-   - - - - - - -
__init__(self, - xbry, - ybry, - beta, - shape, - ul_idx=0, - focus=None, - proj=None, - nnodes=14, - precision=1e-12, - nppe=3, - newton=True, - thin=True, - checksimplepoly=True, - verbose=False)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
__del__(self)
- delete gridnode object upon deletion
- source code - -
- -
-

Inherited from CGrid: - calculate_orthogonality, - mask_polygon -

- -

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from CGrid: - mask, - mask_psi, - mask_u, - mask_v, - x, - y -

-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - xbry, - ybry, - beta, - shape, - ul_idx=0, - focus=None, - proj=None, - nnodes=14, - precision=1e-12, - nppe=3, - newton=True, - thin=True, - checksimplepoly=True, - verbose=False) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old._Focus_x-class.html b/pyroms/docs/pyroms.hgrid_old._Focus_x-class.html deleted file mode 100644 index 34a4fd2..0000000 --- a/pyroms/docs/pyroms.hgrid_old._Focus_x-class.html +++ /dev/null @@ -1,283 +0,0 @@ - - - - - pyroms.hgrid_old._Focus_x - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class _Focus_x - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class _Focus_x

source code

-
-object --+
-         |
-        _Focus_x
-
- -
-
-
-Return a transformed, uniform grid, focused in the x-direction
-
-This class may be called with a uniform grid, with limits from [0, 1], to
-create a focused grid in the x-direction centered about xo. The output
-grid is also uniform from [0, 1] in both x and y.
-
-Parameters
-----------
-xo : float
-    Location about which to focus grid
-factor : float
-    amount to focus grid. Creates cell sizes that are factor smaller in
-    the focused
-    region.
-Rx : float
-    Lateral extent of focused region, similar to a lateral spatial scale
-    for the focusing region.
-
-Returns
--------
-foc : class
-    The class may be called with arguments of a grid. The returned
-    transformed grid (x, y) will be focused as per the parameters above.
-
-
- - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - xo, - factor=2.0, - Rx=0.1)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
__call__(self, - x, - y) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - xo, - factor=2.0, - Rx=0.1) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old._Focus_y-class.html b/pyroms/docs/pyroms.hgrid_old._Focus_y-class.html deleted file mode 100644 index c4de6cd..0000000 --- a/pyroms/docs/pyroms.hgrid_old._Focus_y-class.html +++ /dev/null @@ -1,282 +0,0 @@ - - - - - pyroms.hgrid_old._Focus_y - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class _Focus_y - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class _Focus_y

source code

-
-object --+
-         |
-        _Focus_y
-
- -
-
-
-Return a transformed, uniform grid, focused in the y-direction
-
-This class may be called with a uniform grid, with limits from [0, 1], 
-to create a focused grid in the y-direction centered about yo. 
-The output grid is also uniform from [0, 1] in both x and y.
-
-Parameters
-----------
-yo : float
-    Location about which to focus grid
-factor : float
-    amount to focus grid. Creates cell sizes that are factor 
-    smaller in the focused region.
-Ry : float
-    Lateral extent of focused region, similar to a lateral 
-    spatial scale for the focusing region.
-
-Returns
--------
-foc : class
-    The class may be called with arguments of a grid. The returned 
-    transformed grid (x, y) will be focused as per the parameters above.
-
-
- - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
__init__(self, - yo, - factor=2.0, - Ry=0.1)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-   - - - - - - -
__call__(self, - x, - y) - source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - yo, - factor=2.0, - Ry=0.1) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.edit_mask_mesh-class.html b/pyroms/docs/pyroms.hgrid_old.edit_mask_mesh-class.html deleted file mode 100644 index cd9d0c1..0000000 --- a/pyroms/docs/pyroms.hgrid_old.edit_mask_mesh-class.html +++ /dev/null @@ -1,285 +0,0 @@ - - - - - pyroms.hgrid_old.edit_mask_mesh - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class edit_mask_mesh - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class edit_mask_mesh

source code

-
-object --+
-         |
-        edit_mask_mesh
-
- -
-
-
-Interactive mask editor
-
-edit_mask_mesh(grd, proj)
-
-Edit grd mask. Mask/Unmask a cell with a simple click on the cell.
-Mask modifications are stored in mask_change.txt for further use.
-
-Key commands:
-    e : toggle between Editing/Viewing mode
-
-
- - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
_on_key(self, - event) - source code - -
- -
-   - - - - - - -
_on_click(self, - event) - source code - -
- -
-   - - - - - - -
__init__(self, - grd, - proj=None, - **kwargs)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - grd, - proj=None, - **kwargs) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.hgrid_old.get_position_from_map-class.html b/pyroms/docs/pyroms.hgrid_old.get_position_from_map-class.html deleted file mode 100644 index fd6f6f7..0000000 --- a/pyroms/docs/pyroms.hgrid_old.get_position_from_map-class.html +++ /dev/null @@ -1,285 +0,0 @@ - - - - - pyroms.hgrid_old.get_position_from_map - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module hgrid_old :: - Class get_position_from_map - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Class get_position_from_map

source code

-
-object --+
-         |
-        get_position_from_map
-
- -
-
-
-Get cell index position interactively
-
-get_position_from_map(grd, proj)
-
-Get index i, j as well as lon, lat coordinates for one cell
-simply by clicking on the cell.
-
-Key commands:
-    i : toggle between Interactive/Viewing mode
-
-
- - - - - - - - - - - - - - - - - - - -
- - - - - -
Instance Methods[hide private]
-
-   - - - - - - -
_on_key(self, - event) - source code - -
- -
-   - - - - - - -
_on_click(self, - event) - source code - -
- -
-   - - - - - - -
__init__(self, - grd, - proj=None, - **kwargs)
- x.__init__(...) initializes x; see x.__class__.__doc__ for signature
- source code - -
- -
-

Inherited from object: - __delattr__, - __getattribute__, - __hash__, - __new__, - __reduce__, - __reduce_ex__, - __repr__, - __setattr__, - __str__ -

-
- - - - - - - - - -
- - - - - -
Properties[hide private]
-
-

Inherited from object: - __class__ -

-
- - - - - - -
- - - - - -
Method Details[hide private]
-
- -
- -
- - -
-

__init__(self, - grd, - proj=None, - **kwargs) -
(Constructor) -

-
source code  -
- -

x.__init__(...) initializes x; see x.__class__.__doc__ for - signature

-
-
Overrides: - object.__init__ -
(inherited documentation)
- -
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms/docs/pyroms.io-module.html b/pyroms/docs/pyroms.io-module.html deleted file mode 100644 index ad3b34f..0000000 --- a/pyroms/docs/pyroms.io-module.html +++ /dev/null @@ -1,320 +0,0 @@ - - - - - pyroms.io - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms :: - Module io - - - - - - -
[hide private]
[frames] | no frames]
-
- -

Module io

source code

-
-A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset
-
-This module provides two functions, Dataset and MFDataset, that are similar to the
-netCDF[3/4] functions of the same name. This package is a thin wrapper around these
-functions, and provides two services. First of all, it will use either netCDF3 or
-netCDF4 (preferring the latter), so that the netCDF package does not need to be changed
-on different systems that only have one or the other. Second, it will pass through
-netCDF[3/4] objects unchanged, so that netCDF objects, filenames, lists of files, or
-strings with wildcards can be passed to the function indiscriminately.
-
-Examples of usage
------------------
-
-with an input of a string:
-    # returns netCDF4.Dataset object based on file
-    nc = pyroms.io.Dataset(file) 
-  
-    # returns MFnetCDF4.Dataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(file) 
-
-with an input of a list of files:
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.Dataset(files) 
-    
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.MFDataset(files)
-
-with an input of a netCDF4.Dataset or MFnetCDF4.Dataset object:
-    # passes through netCDF4.Dataset or MFnetCDF4.Dataset object
-    nc = pyroms.io.Dataset(nc)
-    
-    # passes through MFDataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(nc)
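A concrete sketch of the same calls; the file names here are hypothetical:

    import pyroms
    nc = pyroms.io.Dataset('ocean_his_0001.nc')      # one file -> netCDF4.Dataset
    ncm = pyroms.io.MFDataset('ocean_his_*.nc')      # wildcard -> netCDF4.MFDataset
    temp = ncm.variables['temp'][:]                  # read a variable across all files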
-
-
- - - - - - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
MFDataset(ncfile)
- A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset
- source code - -
- -
-   - - - - - - -
Dataset(ncfile)
- A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

MFDataset(ncfile) -

-
source code  -
- -
-A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset
-
-This module provides two functions, Dataset and MFDataset, that are similar to the
-netCDF[3/4] functions of the same name. This package is a thin wrapper around these
-functions, and provides two services. First of all, it will use either netCDF3 or
-netCDF4 (preferring the latter), so that the netCDF package does not need to be changed
-on different systems that only have one or the other. Second, it will pass through
-netCDF[3/4] objects unchanged, so that netCDF objects, filenames, lists of files, or
-strings with wildcards can be passed to the function indiscriminately.
-
-Examples of usage
------------------
-
-with an input of a string:
-    # returns netCDF4.Dataset object based on file
-    nc = pyroms.io.Dataset(file) 
-  
-    # returns MFnetCDF4.Dataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(file) 
-
-with an input of a list of files:
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.Dataset(files) 
-    
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.MFDataset(files)
-
-with an input of a netCDF4.Dataset or MFnetCDF4.Dataset object:
-    # passes through netCDF4.Dataset or MFnetCDF4.Dataset object
-    nc = pyroms.io.Dataset(nc)
-    
-    # passes through MFDataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(nc)
-
-
-
-
-
-
- -
- -
- - -
-

Dataset(ncfile) -

-
source code  -
- -
-A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset
-
-This module provides two functions, Dataset and MFDataset, that are similar to the
-netCDF[3/4] functions of the same name. This package is a thin wrapper around these
-functions, and provides two services. First of all, it will use either netCDF3 or
-netCDF4 (preferring the latter), so that the netCDF package does not need to be changed
-on different systems that only have one or the other. Second, it will pass through
-netCDF[3/4] objects unchanged, so that netCDF objects, filenames, lists of files, or
-strings with wildcards can be passed to the function indiscriminately.
-
-Examples of usage
------------------
-
-with an input of a string:
-    # returns netCDF4.Dataset object based on file
-    nc = pyroms.io.Dataset(file) 
-  
-    # returns MFnetCDF4.Dataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(file) 
-
-with an input of a list of files:
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.Dataset(files) 
-    
-    # returns MFDataset object based on list of files
-    nc = pyroms.io.MFDataset(files)
-
-with an input of a netCDF4.Dataset or MFnetCDF4.Dataset object:
-    # passes through netCDF4.Dataset or MFnetCDF4.Dataset object
-    nc = pyroms.io.Dataset(nc)
-    
-    # passes through MFDataset object based on file (with wildcard chars)
-    nc = pyroms.io.MFDataset(nc)
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.io-pysrc.html b/pyroms/docs/pyroms.io-pysrc.html
deleted file mode 100644
index 65050f3..0000000
--- a/pyroms/docs/pyroms.io-pysrc.html
+++ /dev/null
@@ -1,212 +0,0 @@

Package pyroms :: Module io

Source Code for Module pyroms.io
# encoding: utf-8
'''A thin wrapper for netCDF4.Dataset and netCDF4.MFDataset

This module provides two functions, Dataset and MFDataset, that are similar to
the netCDF[3/4] functions of the same name. This package is a thin wrapper
around these functions, and provides two services. First, it will use either
netCDF3 or netCDF4 (preferring the latter), so that the netCDF package does not
need to be changed on systems that only have one or the other. Second, it will
pass netCDF[3/4] objects through unchanged, so that netCDF objects, filenames,
lists of files, or strings with wildcards can be passed to the functions
indiscriminately.
'''
__docformat__ = "restructuredtext en"

from glob import glob

try:
    try:
        import netCDF4 as netCDF
    except ImportError:
        import netCDF3 as netCDF

    def Dataset(ncfile):
        """Return an appropriate netcdf object:
               netCDF4 object given a file string
               MFnetCDF4 object given a list of files

           A netCDF4 or MFnetCDF4 object returns itself."""
        if isinstance(ncfile, str):
            return netCDF.Dataset(ncfile, 'r')
        elif isinstance(ncfile, (list, tuple)):
            return netCDF.MFDataset(sorted(ncfile))
        elif hasattr(ncfile, 'variables'):
            # accept any object with a variables attribute
            assert isinstance(ncfile.variables, dict), \
                   'variables attribute must be a dictionary'
            return ncfile
        else:
            raise TypeError('type %s not supported' % type(ncfile))

    Dataset.__doc__ = __doc__

    def MFDataset(ncfile):
        """Return an MFnetCDF4 object given a string or list.  A string is
           expanded with wildcards using glob.  A netCDF4 or MFnetCDF4 object
           returns itself."""
        if isinstance(ncfile, str):
            ncfiles = glob(ncfile)
            return netCDF.MFDataset(sorted(ncfiles))
        elif isinstance(ncfile, (list, tuple)):
            return netCDF.MFDataset(sorted(ncfile))
        elif hasattr(ncfile, 'variables'):
            # accept any object with a variables attribute
            assert isinstance(ncfile.variables, dict), \
                   'variables attribute must be a dictionary'
            return ncfile
        else:
            raise TypeError('type %s not supported' % type(ncfile))

    MFDataset.__doc__ = __doc__

except ImportError:
    # fall back on the bundled pure-python reader
    import pyroms.extern.pupynere as pupynere
    import warnings

    warnings.warn('netCDF[3/4] not found -- using pupynere.')

    def Dataset(ncfile):
        if isinstance(ncfile, str):
            return pupynere.NetCDFFile(ncfile)
        elif isinstance(ncfile, pupynere.NetCDFFile):
            return ncfile
        else:
            raise TypeError('type %s not supported' % type(ncfile))

    Dataset.__doc__ = __doc__


if __name__ == '__main__':
    pass
diff --git a/pyroms/docs/pyroms.remapping-module.html b/pyroms/docs/pyroms.remapping-module.html
deleted file mode 100644
index c5c7fc4..0000000
--- a/pyroms/docs/pyroms.remapping-module.html
+++ /dev/null
@@ -1,143 +0,0 @@

Package pyroms :: Package remapping
Package remapping

A set of tools for remapping

Submodules (see the package __init__ below): make_remap_grid_file,
compute_remap_weights, test_remap_weights, remap, scrip, roms2z, z2roms, flood
diff --git a/pyroms/docs/pyroms.remapping-pysrc.html b/pyroms/docs/pyroms.remapping-pysrc.html
deleted file mode 100644
index 26d777c..0000000
--- a/pyroms/docs/pyroms.remapping-pysrc.html
+++ /dev/null
@@ -1,127 +0,0 @@

Package pyroms :: Package remapping

Source Code for Package pyroms.remapping
# encoding: utf-8
'''
A set of tools for remapping
'''

from make_remap_grid_file import make_remap_grid_file
from compute_remap_weights import compute_remap_weights
from test_remap_weights import test_remap_weights
from remap import remap
try:
    import scrip
except ImportError:
    print('scrip.so not found. Remapping function will not be available')
from roms2z import roms2z
from z2roms import z2roms
from flood import flood
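Taken together, these submodules implement the usual SCRIP-style workflow:
describe the source and destination grids, compute the interpolation weights
once, then apply them to each field with remap(). A rough sketch, assuming a
ROMS grid registered as 'MYGRID' in the gridid.txt file and a source grid
already described in a hypothetical remap_grid_SODA_t.nc (all file and map
names below are made up):

    import pyroms

    # 1. write a SCRIP description of the destination (ROMS) grid at rho points
    pyroms.remapping.make_remap_grid_file('MYGRID', Cpos='rho')

    # 2. compute the weights once; this writes and runs a SCRIP namelist
    pyroms.remapping.compute_remap_weights(
        grid1_file='remap_grid_SODA_t.nc',
        grid2_file='remap_grid_MYGRID_rho.nc',
        interp_file1='remap_weights_SODA_to_MYGRID_bilinear_t_to_rho.nc',
        interp_file2='remap_weights_MYGRID_to_SODA_bilinear_rho_to_t.nc',
        map1_name='SODA to MYGRID Bilinear Mapping',
        map2_name='MYGRID to SODA Bilinear Mapping',
        num_maps=2, map_method='bilinear')

    # 3. the weights file from step 2 is then handed to pyroms.remapping.remap()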
diff --git a/pyroms/docs/pyroms.remapping.compute_remap_weights'-module.html b/pyroms/docs/pyroms.remapping.compute_remap_weights'-module.html
deleted file mode 100644
index 71f03c1..0000000
--- a/pyroms/docs/pyroms.remapping.compute_remap_weights'-module.html
+++ /dev/null
@@ -1,155 +0,0 @@

Package pyroms :: Package remapping :: Module compute_remap_weights'
Module compute_remap_weights'

Functions

    compute_remap_weights(grid1_file, grid2_file, interp_file1, interp_file2,
                          map1_name, map2_name, num_maps, map_method,
                          luse_grid1_area='.false.', luse_grid2_area='.false.',
                          normalize_opt='fracarea', output_opt='scrip',
                          restrict_type='latitude', num_srch_bins='90',
                          grid1_periodic='.false.', grid2_periodic='.false.')
        compute remap weights and addresses
diff --git a/pyroms/docs/pyroms.remapping.compute_remap_weights'-pysrc.html b/pyroms/docs/pyroms.remapping.compute_remap_weights'-pysrc.html
deleted file mode 100644
index 1d8a318..0000000
--- a/pyroms/docs/pyroms.remapping.compute_remap_weights'-pysrc.html
+++ /dev/null
@@ -1,156 +0,0 @@

Package pyroms :: Package remapping :: Module compute_remap_weights'

Source Code for Module pyroms.remapping.compute_remap_weights'
import os
import pyroms


def compute_remap_weights(grid1_file, grid2_file,
                          interp_file1, interp_file2, map1_name,
                          map2_name, num_maps, map_method,
                          luse_grid1_area='.false.', luse_grid2_area='.false.',
                          normalize_opt='fracarea', output_opt='scrip',
                          restrict_type='latitude', num_srch_bins='90',
                          grid1_periodic='.false.', grid2_periodic='.false.'):
    '''
    compute remap weights and addresses
    '''

    # write namelist file
    f = open('compute_remap_weights_in', 'w')

    f.write('&remap_inputs' + '\n')
    f.write(' num_maps = ' + str(num_maps) + '\n')
    f.write(' grid1_file = \'' + str(grid1_file) + '\'\n')
    f.write(' grid2_file = \'' + str(grid2_file) + '\'\n')
    f.write(' interp_file1 = \'' + str(interp_file1) + '\'\n')
    f.write(' interp_file2 = \'' + str(interp_file2) + '\'\n')
    f.write(' map1_name = \'' + str(map1_name) + '\'\n')
    f.write(' map2_name = \'' + str(map2_name) + '\'\n')
    f.write(' map_method = \'' + str(map_method) + '\'\n')
    f.write(' normalize_opt = \'' + str(normalize_opt) + '\'\n')
    f.write(' output_opt = \'' + str(output_opt) + '\'\n')
    f.write(' restrict_type = \'' + str(restrict_type) + '\'\n')
    f.write(' num_srch_bins = ' + str(num_srch_bins) + '\n')
    f.write(' luse_grid1_area = ' + str(luse_grid1_area) + '\n')
    f.write(' luse_grid2_area = ' + str(luse_grid2_area) + '\n')
    f.write(' grid1_periodic = ' + str(grid1_periodic) + '\n')
    f.write(' grid2_periodic = ' + str(grid2_periodic) + '\n')
    f.write('/\n')

    f.close()

    # compute weights
    pyroms.remapping.scrip.compute_remap_weights('compute_remap_weights_in')

    # clean up the namelist file
    os.remove('compute_remap_weights_in')
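For reference, the hypothetical call sketched after the package __init__ above
would leave a temporary namelist like this in the working directory while SCRIP
runs (the values simply follow the write statements in the function; the file
names are the same made-up ones):

    &remap_inputs
     num_maps = 2
     grid1_file = 'remap_grid_SODA_t.nc'
     grid2_file = 'remap_grid_MYGRID_rho.nc'
     interp_file1 = 'remap_weights_SODA_to_MYGRID_bilinear_t_to_rho.nc'
     interp_file2 = 'remap_weights_MYGRID_to_SODA_bilinear_rho_to_t.nc'
     map1_name = 'SODA to MYGRID Bilinear Mapping'
     map2_name = 'MYGRID to SODA Bilinear Mapping'
     map_method = 'bilinear'
     normalize_opt = 'fracarea'
     output_opt = 'scrip'
     restrict_type = 'latitude'
     num_srch_bins = 90
     luse_grid1_area = .false.
     luse_grid2_area = .false.
     grid1_periodic = .false.
     grid2_periodic = .false.
    /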
diff --git a/pyroms/docs/pyroms.remapping.compute_remap_weights-module.html b/pyroms/docs/pyroms.remapping.compute_remap_weights-module.html
deleted file mode 100644
index b39f8fb..0000000
--- a/pyroms/docs/pyroms.remapping.compute_remap_weights-module.html
+++ /dev/null
@@ -1,155 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.compute_remap_weights-pysrc.html b/pyroms/docs/pyroms.remapping.compute_remap_weights-pysrc.html
deleted file mode 100644
index 3604ff5..0000000
--- a/pyroms/docs/pyroms.remapping.compute_remap_weights-pysrc.html
+++ /dev/null
@@ -1,156 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.flood'-module.html b/pyroms/docs/pyroms.remapping.flood'-module.html
deleted file mode 100644
index 5213f5a..0000000
--- a/pyroms/docs/pyroms.remapping.flood'-module.html
+++ /dev/null
@@ -1,222 +0,0 @@

Package pyroms :: Package remapping :: Module flood'
Module flood'

Functions

    flood(varz, grdz, Cpos='rho', irange=None, jrange=None, spval=1e37,
          dmax=0, cdepth=0, kk=0)
        var = flood(var, grdz)

Function Details

flood(varz, grdz, Cpos='rho', irange=None, jrange=None, spval=1e37,
      dmax=0, cdepth=0, kk=0)

    var = flood(var, grdz)

    optional switches:
      - Cpos='rho', 'u' or 'v'   specify the C-grid position where the variable lies
      - irange                   specify a grid sub-sample for the i direction
      - jrange                   specify a grid sub-sample for the j direction
      - spval=1e37               define the spval (missing) value
      - dmax=0                   if dmax>0, maximum horizontal flooding distance
      - cdepth=0                 critical depth for flooding; if depth<cdepth => no flooding
      - kk

    Flood varz on the grid grdz.
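A short usage sketch (the arrays and grid objects are assumed to already
exist): flood is typically applied to a z-level field that still has spval
holes over land, before the horizontal remapping step.

    import pyroms

    # temp_z: 3-D array (nlev, Mm, Lm) on the z-level grid object grdz,
    # with missing points set to spval
    temp_fld = pyroms.remapping.flood(temp_z, grdz, Cpos='rho',
                                      spval=1e37, dmax=0, cdepth=0)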
diff --git a/pyroms/docs/pyroms.remapping.flood'-pysrc.html b/pyroms/docs/pyroms.remapping.flood'-pysrc.html
deleted file mode 100644
index 85a4041..0000000
--- a/pyroms/docs/pyroms.remapping.flood'-pysrc.html
+++ /dev/null
@@ -1,292 +0,0 @@

Package pyroms :: Package remapping :: Module flood'

Source Code for Module pyroms.remapping.flood'
-  1  # encoding: utf-8 
-  2   
-  3  import numpy as np 
-  4  import _remapping 
-  5   
-  6  import pyroms 
-  7   
-
8 -def flood(varz, grdz, Cpos='rho', irange=None, jrange=None, \ - 9 spval=1e37, dmax=0, cdepth=0, kk=0): -
10 """ - 11 var = flood(var, grdz) - 12 - 13 optional switch: - 14 - Cpos='rho', 'u' or 'v' specify the C-grid position where - 15 the variable rely - 16 - irange specify grid sub-sample for i direction - 17 - jrange specify grid sub-sample for j direction - 18 - spval=1e37 define spval value - 19 - dmax=0 if dmax>0, maximum horizontal - 20 flooding distance - 21 - cdepth=0 critical depth for flooding - 22 if depth<cdepth => no flooding - 23 - kk - 24 - 25 Flood varz on gridz - 26 """ - 27 - 28 varz = varz.copy() - 29 varz = np.array(varz) - 30 - 31 assert len(varz.shape) == 3, 'var must be 3D' - 32 - 33 # replace spval by nan - 34 idx = np.where(abs((varz-spval)/spval)<=1e-5) - 35 varz[idx] = np.nan - 36 - 37 if Cpos is 'rho': - 38 x = grdz.hgrid.lon_rho - 39 y = grdz.hgrid.lat_rho - 40 z = grdz.vgrid.z[:] - 41 h = grdz.vgrid.h - 42 mask = grdz.hgrid.mask_rho - 43 elif Cpos is 'u': - 44 x = grdz.hgrid.lon_u - 45 y = grdz.hgrid.lat_u - 46 z = 0.5 * (grdz.vgrid.z[:,:,:-1] + grdz.vgrid.z[:,:,1:]) - 47 h = 0.5 * (grdz.vgrid.h[:,:-1] + grdz.vgrid.h[:,1:]) - 48 mask = grdz.hgrid.mask_u - 49 elif Cpos is 'v': - 50 x = grdz.hgrid.lon_v - 51 y = grdz.hgrid.lat_v - 52 z = 0.5 * (grdz.vgrid.z[:,:-1,:] + grdz.vgrid.z[:,1:,:]) - 53 h = 0.5 * (grdz.vgrid.h[:-1,:] + grdz.vgrid.h[1:,:]) - 54 mask = grdz.hgrid.mask_v - 55 elif Cpos is 'w': - 56 x = grdz.hgrid.lon_rho - 57 y = grdz.hgrid.lat_rho - 58 z = grdz.vgrid.z[:] - 59 h = grdz.vgrid.h - 60 mask = grdz.hgrid.mask_rho - 61 else: - 62 raise Warning, '%s bad position. Use depth at Arakawa-C rho points instead.' % Cpos - 63 - 64 nlev, Mm, Lm = varz.shape - 65 - 66 if irange is None: - 67 irange = (0,Lm) - 68 else: - 69 assert varz.shape[2] == irange[1]-irange[0], \ - 70 'var shape and irange must agreed' - 71 - 72 if jrange is None: - 73 jrange = (0,Mm) - 74 else: - 75 assert varz.shape[1] == jrange[1]-jrange[0], \ - 76 'var shape and jrange must agreed' - 77 - 78 x = x[jrange[0]:jrange[1], irange[0]:irange[1]] - 79 y = y[jrange[0]:jrange[1], irange[0]:irange[1]] - 80 z = z[:,jrange[0]:jrange[1], irange[0]:irange[1]] - 81 h = h[jrange[0]:jrange[1], irange[0]:irange[1]] - 82 mask = mask[jrange[0]:jrange[1], irange[0]:irange[1]] - 83 - 84 # Finding nearest values in horizontal - 85 # critical deph => no change if depth is less than specified value - 86 cdepth = abs(cdepth) - 87 if cdepth != 0: - 88 idx = np.where(h >= cdepth) - 89 msk = np.zeros(mask.shape) - 90 msk[idx] = 1 - 91 else: - 92 msk = mask.copy() - 93 for k in range(nlev-1): - 94 c1 = np.array(msk, dtype=bool) - 95 c2 = np.isnan(varz[k,:,:]) == 1 - 96 if kk == 0: - 97 c3 = np.ones(mask.shape).astype(bool) - 98 else: - 99 c3 = np.isnan(varz[min(k+kk,nlev-1),:,:]) == 0 -100 c = c1 & c2 & c3 -101 idxnan = np.where(c == True) -102 idx = np.where(c2 == False) -103 if list(idx[0]): -104 wet = np.zeros((len(idx[0]),2)) -105 dry = np.zeros((len(idxnan[0]),2)) -106 wet[:,0] = idx[0]+1 -107 wet[:,1] = idx[1]+1 -108 dry[:,0] = idxnan[0]+1 -109 dry[:,1] = idxnan[1]+1 -110 -111 varz[k,:] = _remapping.flood(varz[k,:], wet, dry, x, y, dmax) -112 -113 # drop the deepest values down -114 idx = np.where(np.isnan(varz) == 1) -115 varz[idx] = spval -116 bottom = pyroms.utility.get_bottom(varz, mask, spval=spval) -117 surface = pyroms.utility.get_surface(varz, mask, spval=spval) -118 for i in range(Lm): -119 for j in range(Mm): -120 if mask[j,i] == 1: -121 varz[:bottom[j,i],j,i] = varz[bottom[j,i],j,i] -122 varz[surface[j,i]:,j,i] = varz[surface[j,i],j,i] -123 -124 return varz -
125 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.remapping.flood-module.html b/pyroms/docs/pyroms.remapping.flood-module.html
deleted file mode 100644
index 1160a23..0000000
--- a/pyroms/docs/pyroms.remapping.flood-module.html
+++ /dev/null
@@ -1,222 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.flood-pysrc.html b/pyroms/docs/pyroms.remapping.flood-pysrc.html
deleted file mode 100644
index 4a49cdd..0000000
--- a/pyroms/docs/pyroms.remapping.flood-pysrc.html
+++ /dev/null
@@ -1,250 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.make_remap_grid_file'-module.html b/pyroms/docs/pyroms.remapping.make_remap_grid_file'-module.html
deleted file mode 100644
index c5ca72f..0000000
--- a/pyroms/docs/pyroms.remapping.make_remap_grid_file'-module.html
+++ /dev/null
@@ -1,141 +0,0 @@

Package pyroms :: Package remapping :: Module make_remap_grid_file'
Module make_remap_grid_file'

Functions

    make_remap_grid_file(grid)
        generate grid file to be used with scrip to compute the weights for
        remapping.
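A short sketch (the grid id 'MYGRID' is hypothetical): one SCRIP grid file is
written per C-grid position, following the remap_grid_<name>_<Cpos>.nc naming
used in the source below.

    import pyroms

    grd = pyroms.grid.get_ROMS_grid('MYGRID')

    # writes remap_grid_MYGRID_rho.nc, remap_grid_MYGRID_u.nc, remap_grid_MYGRID_v.nc
    for pos in ('rho', 'u', 'v'):
        pyroms.remapping.make_remap_grid_file(grd, Cpos=pos)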
diff --git a/pyroms/docs/pyroms.remapping.make_remap_grid_file'-pysrc.html b/pyroms/docs/pyroms.remapping.make_remap_grid_file'-pysrc.html
deleted file mode 100644
index 653a49e..0000000
--- a/pyroms/docs/pyroms.remapping.make_remap_grid_file'-pysrc.html
+++ /dev/null
@@ -1,228 +0,0 @@

Package pyroms :: Package remapping :: Module make_remap_grid_file'

Source Code for Module pyroms.remapping.make_remap_grid_file'
-  1  import numpy as np 
-  2  from datetime import datetime 
-  3  try: 
-  4    import netCDF4 as netCDF 
-  5  except: 
-  6    import netCDF3 as netCDF 
-  7  import pyroms 
-  8   
-  9   
-
10 -def make_remap_grid_file(grid, Cpos='rho'): -
11 ''' - 12 make_remap_grid_file(grid) - 13 - 14 generate grid file to be used with scrip to compute - 15 the weights for remapping. - 16 ''' - 17 - 18 # get grid - 19 if type(grid).__name__ == 'ROMS_Grid': - 20 grd = grid - 21 else: - 22 grd = pyroms.grid.get_ROMS_grid(grid) - 23 - 24 #create remap file - 25 remap_filename = 'remap_grid_' + grd.name + '_' + Cpos + '.nc' - 26 nc = netCDF.Dataset(remap_filename, 'w', format='NETCDF3_CLASSIC') - 27 nc.Description = 'remap grid file on' + Cpos + 'points' - 28 nc.Author = 'pyroms.remapping.make_remap_grid_file' - 29 nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - 30 nc.title = grd.name - 31 - 32 if Cpos == 'rho': - 33 lon_corner = grd.hgrid.lon_vert - 34 lat_corner = grd.hgrid.lat_vert - 35 grid_center_lon = grd.hgrid.lon_rho.flatten() - 36 grid_center_lat = grd.hgrid.lat_rho.flatten() - 37 grid_imask = grd.hgrid.mask_rho.flatten() - 38 Mp, Lp = grd.hgrid.mask_rho.shape - 39 elif Cpos == 'u': - 40 lon_corner = 0.5 * (grd.hgrid.lon_vert[:,:-1] + \ - 41 grd.hgrid.lon_vert[:,1:]) - 42 lat_corner = 0.5 * (grd.hgrid.lat_vert[:,:-1] + \ - 43 grd.hgrid.lat_vert[:,1:]) - 44 grid_center_lon = grd.hgrid.lon_u.flatten() - 45 grid_center_lat = grd.hgrid.lat_u.flatten() - 46 grid_imask = grd.hgrid.mask_u.flatten() - 47 Mp, Lp = grd.hgrid.mask_u.shape - 48 elif Cpos == 'v': - 49 lon_corner = 0.5 * (grd.hgrid.lon_vert[:-1,:] + \ - 50 grd.hgrid.lon_vert[1:,:]) - 51 lat_corner = 0.5 * (grd.hgrid.lat_vert[:-1,:] + \ - 52 grd.hgrid.lat_vert[1:,:]) - 53 grid_center_lon = grd.hgrid.lon_v.flatten() - 54 grid_center_lat = grd.hgrid.lat_v.flatten() - 55 grid_imask = grd.hgrid.mask_v.flatten() - 56 Mp, Lp = grd.hgrid.mask_v.shape - 57 else: - 58 raise ValueError, 'Cpos must be rho, u or v' - 59 - 60 grid_size = Lp * Mp - 61 - 62 grid_corner_lon = np.zeros((grid_size, 4)) - 63 grid_corner_lat = np.zeros((grid_size, 4)) - 64 k = 0 - 65 for j in range(Mp): - 66 for i in range(Lp): - 67 grid_corner_lon[k,0] = lon_corner[j,i] - 68 grid_corner_lat[k,0] = lat_corner[j,i] - 69 grid_corner_lon[k,1] = lon_corner[j,i+1] - 70 grid_corner_lat[k,1] = lat_corner[j,i+1] - 71 grid_corner_lon[k,2] = lon_corner[j+1,i+1] - 72 grid_corner_lat[k,2] = lat_corner[j+1,i+1] - 73 grid_corner_lon[k,3] = lon_corner[j+1,i] - 74 grid_corner_lat[k,3] = lat_corner[j+1,i] - 75 k = k + 1 - 76 - 77 #Write netcdf file - 78 nc.createDimension('grid_size', grid_size) - 79 nc.createDimension('grid_corners', 4) - 80 nc.createDimension('grid_rank', 2) - 81 - 82 nc.createVariable('grid_dims', 'i4', ('grid_rank')) - 83 nc.variables['grid_dims'].long_name = 'grid size along x and y axis' - 84 nc.variables['grid_dims'].units = 'None' - 85 nc.variables['grid_dims'][:] = [(Lp, Mp)] - 86 - 87 nc.createVariable('grid_center_lon', 'f8', ('grid_size')) - 88 nc.variables['grid_center_lon'].long_name = 'longitude of cell center' - 89 nc.variables['grid_center_lon'].units = 'degrees' - 90 nc.variables['grid_center_lon'][:] = grid_center_lon - 91 - 92 nc.createVariable('grid_center_lat', 'f8', ('grid_size')) - 93 nc.variables['grid_center_lat'].long_name = 'latitude of cell center' - 94 nc.variables['grid_center_lat'].units = 'degrees' - 95 nc.variables['grid_center_lat'][:] = grid_center_lat - 96 - 97 nc.createVariable('grid_imask', 'i4', ('grid_size')) - 98 nc.variables['grid_imask'].long_name = 'mask' - 99 nc.variables['grid_imask'].units = 'None' -100 nc.variables['grid_imask'][:] = grid_imask -101 -102 nc.createVariable('grid_corner_lon', 'f8', ('grid_size', 'grid_corners')) -103 
nc.variables['grid_corner_lon'].long_name = 'longitude of cell corner' -104 nc.variables['grid_corner_lon'].units = 'degrees' -105 nc.variables['grid_corner_lon'][:] = grid_corner_lon -106 -107 nc.createVariable('grid_corner_lat', 'f8', ('grid_size', 'grid_corners')) -108 nc.variables['grid_corner_lat'].long_name = 'latitude of cell corner' -109 nc.variables['grid_corner_lat'].units = 'degrees' -110 nc.variables['grid_corner_lat'][:] = grid_corner_lat -111 -112 nc.close() -
113 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.remapping.make_remap_grid_file-module.html b/pyroms/docs/pyroms.remapping.make_remap_grid_file-module.html
deleted file mode 100644
index 007d6c2..0000000
--- a/pyroms/docs/pyroms.remapping.make_remap_grid_file-module.html
+++ /dev/null
@@ -1,141 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.make_remap_grid_file-pysrc.html b/pyroms/docs/pyroms.remapping.make_remap_grid_file-pysrc.html
deleted file mode 100644
index 035a1f7..0000000
--- a/pyroms/docs/pyroms.remapping.make_remap_grid_file-pysrc.html
+++ /dev/null
@@ -1,224 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.remap'-module.html b/pyroms/docs/pyroms.remapping.remap'-module.html
deleted file mode 100644
index 672d065..0000000
--- a/pyroms/docs/pyroms.remapping.remap'-module.html
+++ /dev/null
@@ -1,146 +0,0 @@

Package pyroms :: Package remapping :: Module remap'
Module remap'

Functions

    remap(src_array, remap_file, src_grad1=None, src_grad2=None, src_grad3=None,
          spval=1e37, verbose=False)
        remap based on addresses and weights computed in a setup phase
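A usage sketch (the weights file name is the hypothetical one from the earlier
example): remap applies precomputed SCRIP weights to a 2-D or 3-D source field
and returns the field on the destination grid, masked with spval.

    import pyroms

    dst_temp = pyroms.remapping.remap(
        src_temp,                                             # 2-D or 3-D source array
        'remap_weights_SODA_to_MYGRID_bilinear_t_to_rho.nc',  # hypothetical weights file
        spval=1e37, verbose=True)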
diff --git a/pyroms/docs/pyroms.remapping.remap'-pysrc.html b/pyroms/docs/pyroms.remapping.remap'-pysrc.html
deleted file mode 100644
index aa87c4c..0000000
--- a/pyroms/docs/pyroms.remapping.remap'-pysrc.html
+++ /dev/null
@@ -1,267 +0,0 @@

Package pyroms :: Package remapping :: Module remap'

Source Code for Module pyroms.remapping.remap'
-  1  import numpy as np 
-  2  try: 
-  3      import netCDF4 as netCDF 
-  4  except: 
-  5      import netCDF3 as netCDF 
-  6  import pyroms 
-  7   
-  8   
-
9 -def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \ - 10 src_grad3=None, spval=1e37, verbose=False): -
11 ''' - 12 remap based on addresses and weights computed in a setup phase - 13 ''' - 14 - 15 # get info from remap_file - 16 data = netCDF.Dataset(remap_file, 'r') - 17 title = data.title - 18 map_method = data.map_method - 19 normalization = data.normalization - 20 src_grid_name = data.source_grid - 21 dst_grid_name = data.dest_grid - 22 src_grid_size = len(data.dimensions['src_grid_size']) - 23 dst_grid_size = len(data.dimensions['dst_grid_size']) - 24 num_links = len(data.dimensions['num_links']) - 25 src_grid_dims = data.variables['src_grid_dims'] - 26 dst_grid_dims = data.variables['dst_grid_dims'] - 27 - 28 # get weights and addresses from remap_file - 29 map_wts = data.variables['remap_matrix'][:] - 30 dst_add = data.variables['dst_address'][:] - 31 src_add = data.variables['src_address'][:] - 32 - 33 # get destination mask - 34 dst_mask = data.variables['dst_grid_imask'][:] - 35 - 36 # remap from src grid to dst grid - 37 if src_grad1 is not None: - 38 iorder = 2 - 39 else: - 40 iorder = 1 - 41 - 42 if verbose is True: - 43 print 'Reading remapping: ', title - 44 print 'From file: ', remap_file - 45 print ' ' - 46 print 'Remapping between:' - 47 print src_grid_name - 48 print 'and' - 49 print dst_grid_name - 50 print 'Remapping method: ', map_method - 51 - 52 ndim = len(src_array.squeeze().shape) - 53 - 54 if (ndim == 2): - 55 tmp_dst_array = np.zeros((dst_grid_size)) - 56 tmp_src_array = src_array.flatten() - 57 - 58 if iorder == 1: - 59 # first order remapping - 60 # insure that map_wts is a (num_links,4) array - 61 tmp_map_wts = np.zeros((num_links,4)) - 62 tmp_map_wts[:,0] = map_wts[:,0].copy() - 63 map_wts = tmp_map_wts - 64 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ - 65 dst_add, src_add, tmp_src_array) - 66 - 67 if iorder == 2: - 68 # second order remapping - 69 if map_method == 'conservative': - 70 # insure that map_wts is a (num_links,4) array - 71 tmp_map_wts = np.zeros((num_links,4)) - 72 tmp_map_wts[:,0:2] = map_wts[:,0:2].copy() - 73 map_wts = tmp_map_wts - 74 tmp_src_grad1 = src_grad1.flatten() - 75 tmp_src_grad2 = src_grad2.flatten() - 76 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ - 77 dst_add, src_add, tmp_src_array, \ - 78 tmp_src_grad1, tmp_src_grad2) - 79 elif map_method == 'bicubic': - 80 tmp_src_grad1 = src_grad1.flatten() - 81 tmp_src_grad2 = src_grad2.flatten() - 82 tmp_src_grad3 = src_grad3.flatten() - 83 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ - 84 dst_add, src_add, tmp_src_array, \ - 85 tmp_src_grad1, tmp_src_grad2, \ - 86 tmp_src_grad3) - 87 else: - 88 raise ValueError, 'Unknow method' - 89 - 90 # mask dst_array - 91 idx = np.where(dst_mask == 0) - 92 tmp_dst_array[idx] = spval - 93 tmp_dst_array = np.ma.masked_values(tmp_dst_array, spval) - 94 - 95 # reshape - 96 dst_array = np.reshape(tmp_dst_array, (dst_grid_dims[1], \ - 97 dst_grid_dims[0])) - 98 - 99 elif (ndim == 3): -100 -101 nlev = src_array.shape[0] -102 dst_array = np.zeros((nlev, dst_grid_dims[1], dst_grid_dims[0])) -103 -104 # loop over vertical level -105 for k in range(nlev): -106 -107 tmp_src_array = src_array[k,:,:].flatten() -108 tmp_dst_array = np.zeros((dst_grid_size)) -109 -110 if iorder == 1: -111 # first order remapping -112 # insure that map_wts is a (num_links,4) array -113 tmp_map_wts = np.zeros((num_links,4)) -114 tmp_map_wts[:,0] = map_wts[:,0].copy() -115 map_wts = tmp_map_wts -116 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ -117 dst_add, src_add, tmp_src_array) -118 -119 if iorder == 2: -120 # second order remapping -121 if 
map_method == 'conservative': -122 tmp_src_grad1 = src_grad1.flatten() -123 tmp_src_grad2 = src_grad2.flatten() -124 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ -125 dst_add, src_add, tmp_src_array, \ -126 tmp_src_grad1, tmp_src_grad2) -127 elif map_method == 'bicubic': -128 tmp_src_grad1 = src_grad1.flatten() -129 tmp_src_grad2 = src_grad2.flatten() -130 tmp_src_grad3 = src_grad3.flatten() -131 pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \ -132 dst_add, src_add, tmp_src_array, \ -133 tmp_src_grad1, tmp_src_grad2, \ -134 tmp_src_grad3) -135 else: -136 raise ValueError, 'Unknow method' -137 -138 -139 # mask dst_array -140 idx = np.where(dst_mask == 0) -141 tmp_dst_array[idx] = spval -142 tmp_dst_array = np.ma.masked_values(tmp_dst_array, spval) -143 -144 # reshape -145 dst_array[k,:,:] = np.reshape(tmp_dst_array, (dst_grid_dims[1], \ -146 dst_grid_dims[0])) -147 -148 else: -149 raise ValueError, 'src_array must have two or three dimensions' -150 -151 -152 # close data file -153 data.close() -154 -155 return dst_array -
156 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.remapping.remap-module.html b/pyroms/docs/pyroms.remapping.remap-module.html
deleted file mode 100644
index 7ce225e..0000000
--- a/pyroms/docs/pyroms.remapping.remap-module.html
+++ /dev/null
@@ -1,146 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.remap-pysrc.html b/pyroms/docs/pyroms.remapping.remap-pysrc.html
deleted file mode 100644
index 6647ed5..0000000
--- a/pyroms/docs/pyroms.remapping.remap-pysrc.html
+++ /dev/null
@@ -1,267 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.roms2z'-module.html b/pyroms/docs/pyroms.remapping.roms2z'-module.html
deleted file mode 100644
index a2c7698..0000000
--- a/pyroms/docs/pyroms.remapping.roms2z'-module.html
+++ /dev/null
@@ -1,213 +0,0 @@

Package pyroms :: Package remapping :: Module roms2z'
Module roms2z'

Functions

    roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, spval=1e37,
           mode='linear')
        varz = roms2z(var, grd, grdz)

Function Details

roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, spval=1e37,
       mode='linear')

    varz = roms2z(var, grd, grdz)

    optional switches:
      - Cpos='rho', 'u', 'v' or 'w'   specify the C-grid position where the variable lies
      - irange                        specify a grid sub-sample for the i direction
      - jrange                        specify a grid sub-sample for the j direction
      - spval=1e37                    define the spval (missing) value
      - mode='linear' or 'spline'     specify the type of interpolation

    Interpolate the variable from the ROMS grid grd to the z vertical grid grdz.
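A usage sketch (var, grd and grdz are assumed to already exist): roms2z moves a
3-D field from the ROMS s-coordinate grid grd onto the fixed z levels of grdz,
after which it can be flooded and remapped.

    import pyroms

    # temp: 3-D (N, Mm, Lm) field on the ROMS grid grd; grdz is the target z-level grid
    temp_z = pyroms.remapping.roms2z(temp, grd, grdz, Cpos='rho',
                                     spval=1e37, mode='linear')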
diff --git a/pyroms/docs/pyroms.remapping.roms2z'-pysrc.html b/pyroms/docs/pyroms.remapping.roms2z'-pysrc.html
deleted file mode 100644
index 1f9531a..0000000
--- a/pyroms/docs/pyroms.remapping.roms2z'-pysrc.html
+++ /dev/null
@@ -1,205 +0,0 @@

Package pyroms :: Package remapping :: Module roms2z'

Source Code for Module pyroms.remapping.roms2z'
- 1  # encoding: utf-8 
- 2   
- 3  import numpy as np 
- 4  import _interp 
- 5   
-
6 -def roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, \ - 7 spval=1e37, mode='linear'): -
8 """ - 9 varz = roms2z(var, grd, grdz) -10 -11 optional switch: -12 - Cpos='rho', 'u' 'v' or 'w' specify the C-grid position where -13 the variable rely -14 - irange specify grid sub-sample for i direction -15 - jrange specify grid sub-sample for j direction -16 - spval=1e37 define spval value -17 - mode='linear' or 'spline' specify the type of interpolation -18 -19 Interpolate the variable from ROMS grid grd to z vertical grid grdz -20 """ -21 -22 var = var.copy() -23 -24 assert len(var.shape) == 3, 'var must be 3D' -25 -26 if mode=='linear': -27 imode=0 -28 elif mode=='spline': -29 imode=1 -30 else: -31 imode=0 -32 raise Warning, '%s not supported, defaulting to linear' % mode -33 -34 -35 if Cpos is 'rho': -36 z = grd.vgrid.z_r[0,:] -37 depth = grdz.vgrid.z -38 mask = grd.hgrid.mask_rho -39 elif Cpos is 'u': -40 z = 0.5 * (grd.vgrid.z_r[0,:,:,:-1] + grd.vgrid.z_r[0,:,:,1:]) -41 depth = 0.5 * (grdz.vgrid.z[:,:,:-1] + grdz.vgrid.z[:,:,1:]) -42 mask = grd.hgrid.mask_u -43 elif Cpos is 'v': -44 z = 0.5 * (grd.vgrid.z_r[0,:,:-1,:] + grd.vgrid.z_r[0,:,1:,:]) -45 depth = 0.5 * (grdz.vgrid.z[:,:-1,:] + grdz.vgrid.z[:,1:,:]) -46 mask = grd.hgrid.mask_v -47 elif Cpos is 'w': -48 z = grd.vgrid.z_w[0,:] -49 depth = grdz.vgrid.z -50 mask = grd.hgrid.mask_rho -51 else: -52 raise Warning, '%s unknown position. Cpos must be rho, u, v or w.' % Cpos -53 -54 Nm, Mm, Lm = var.shape -55 nlev = grdz.vgrid.N -56 -57 var = np.concatenate((var, var[-2:-1,:,:]), 0) -58 z = np.concatenate((z, 100*np.ones((1,z.shape[1], z.shape[2]))), 0) -59 -60 if irange is None: -61 irange = (0,Lm) -62 else: -63 assert var.shape[2] == irange[1]-irange[0], \ -64 'var shape and irange must agreed' -65 -66 if jrange is None: -67 jrange = (0,Mm) -68 else: -69 assert var.shape[1] == jrange[1]-jrange[0], \ -70 'var shape and jrange must agreed' -71 -72 varz = np.zeros((nlev, jrange[1]-jrange[0], irange[1]-irange[0])) -73 -74 for k in range(nlev): -75 varz[k,:,:] = _interp.xhslice(var, \ -76 z[:,jrange[0]:jrange[1], irange[0]:irange[1]], \ -77 depth[k,jrange[0]:jrange[1], irange[0]:irange[1]], \ -78 mask[jrange[0]:jrange[1], irange[0]:irange[1]], \ -79 imode, spval) -80 -81 #mask -82 idx = np.where(abs((varz-spval)/spval)<=1e-5) -83 varz[idx] = spval -84 #varz = np.ma.masked_values(varz, spval, rtol=1e-5) -85 -86 return varz -
87 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.remapping.roms2z-module.html b/pyroms/docs/pyroms.remapping.roms2z-module.html
deleted file mode 100644
index 4c48d04..0000000
--- a/pyroms/docs/pyroms.remapping.roms2z-module.html
+++ /dev/null
@@ -1,213 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.roms2z-pysrc.html b/pyroms/docs/pyroms.remapping.roms2z-pysrc.html
deleted file mode 100644
index 0e2e19f..0000000
--- a/pyroms/docs/pyroms.remapping.roms2z-pysrc.html
+++ /dev/null
@@ -1,198 +0,0 @@
diff --git a/pyroms/docs/pyroms.remapping.test_remap_weights'-module.html b/pyroms/docs/pyroms.remapping.test_remap_weights'-module.html
deleted file mode 100644
index 9fa0c4a..0000000
--- a/pyroms/docs/pyroms.remapping.test_remap_weights'-module.html
+++ /dev/null
@@ -1,142 +0,0 @@
Package pyroms :: Package remapping :: Module test_remap_weights'

Module test_remap_weights'

source code

Functions

    test_remap_weights(field_choice, interp_file, output_file)
        test addresses and weights computed in a setup phase
diff --git a/pyroms/docs/pyroms.remapping.test_remap_weights'-pysrc.html b/pyroms/docs/pyroms.remapping.test_remap_weights'-pysrc.html
deleted file mode 100644
index 1bf1f9b..0000000
--- a/pyroms/docs/pyroms.remapping.test_remap_weights'-pysrc.html
+++ /dev/null
@@ -1,137 +0,0 @@
Package pyroms :: Package remapping :: Module test_remap_weights'

Source Code for Module pyroms.remapping.test_remap_weights'

    (identical to the listing of pyroms.remapping.test_remap_weights below)
diff --git a/pyroms/docs/pyroms.remapping.test_remap_weights-module.html b/pyroms/docs/pyroms.remapping.test_remap_weights-module.html
deleted file mode 100644
index 2da7ba8..0000000
--- a/pyroms/docs/pyroms.remapping.test_remap_weights-module.html
+++ /dev/null
@@ -1,142 +0,0 @@
Package pyroms :: Package remapping :: Module test_remap_weights

Module test_remap_weights

source code

Functions

    test_remap_weights(field_choice, interp_file, output_file)
        test addresses and weights computed in a setup phase
diff --git a/pyroms/docs/pyroms.remapping.test_remap_weights-pysrc.html b/pyroms/docs/pyroms.remapping.test_remap_weights-pysrc.html
deleted file mode 100644
index 0f31db7..0000000
--- a/pyroms/docs/pyroms.remapping.test_remap_weights-pysrc.html
+++ /dev/null
@@ -1,137 +0,0 @@
Package pyroms :: Package remapping :: Module test_remap_weights

Source Code for Module pyroms.remapping.test_remap_weights
import os

import pyroms


def test_remap_weights(field_choice, interp_file, output_file):
    '''
    test addresses and weights computed in a setup phase
    '''

    # write the SCRIP test namelist file
    f = open('test_remap_weights_in', 'w')
    f.write('&remap_inputs' + '\n')
    f.write('    field_choice = ' + str(field_choice) + '\n')
    f.write('    interp_file = \'' + str(interp_file) + '\'\n')
    f.write('    output_file = \'' + str(output_file) + '\'\n')
    f.write('/')
    f.close()

    # run the SCRIP weight test on that namelist
    pyroms.remapping.scrip.test_remap_weights('test_remap_weights_in')

    # clean up the temporary namelist
    os.remove('test_remap_weights_in')
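A hedged call sketch; the field choice and file names are placeholders only, not values from the deleted page:

    import pyroms

    # check a SCRIP weights file produced earlier, e.g. by compute_remap_weights
    pyroms.remapping.test_remap_weights(1, 'remap_weights_src_to_dst.nc',
                                        'test_remap_output.nc')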
diff --git a/pyroms/docs/pyroms.remapping.z2roms'-module.html b/pyroms/docs/pyroms.remapping.z2roms'-module.html
deleted file mode 100644
index 81c7359..0000000
--- a/pyroms/docs/pyroms.remapping.z2roms'-module.html
+++ /dev/null
@@ -1,232 +0,0 @@
Package pyroms :: Package remapping :: Module z2roms'

Module z2roms'

source code

Functions

    z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, spval=1e+37,
           flood=True, dmax=0, cdepth=0, kk=0, mode='linear')
        var = z2roms(var, grdz, grd)

    (function details are identical to those on the pyroms.remapping.z2roms module page below)
diff --git a/pyroms/docs/pyroms.remapping.z2roms'-pysrc.html b/pyroms/docs/pyroms.remapping.z2roms'-pysrc.html
deleted file mode 100644
index 6016281..0000000
--- a/pyroms/docs/pyroms.remapping.z2roms'-pysrc.html
+++ /dev/null
@@ -1,219 +0,0 @@
Package pyroms :: Package remapping :: Module z2roms'

Source Code for Module pyroms.remapping.z2roms'

    (identical to the listing of pyroms.remapping.z2roms below)
diff --git a/pyroms/docs/pyroms.remapping.z2roms-module.html b/pyroms/docs/pyroms.remapping.z2roms-module.html
deleted file mode 100644
index a6d11f0..0000000
--- a/pyroms/docs/pyroms.remapping.z2roms-module.html
+++ /dev/null
@@ -1,232 +0,0 @@
Package pyroms :: Package remapping :: Module z2roms

Module z2roms

source code

Functions

    z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, spval=1e37,
           flood=True, dmax=0, cdepth=0, kk=0, mode='linear')
        var = z2roms(var, grdz, grd)

Function Details

z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, spval=1e37,
       flood=True, dmax=0, cdepth=0, kk=0, mode='linear')

source code

    var = z2roms(var, grdz, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • irange: grid sub-sample range in the i direction
      • jrange: grid sub-sample range in the j direction
      • spval=1e37: special (fill) value
      • flood=True: flood the z-level field (horizontal extrapolation over masked cells) before interpolating
      • dmax=0: if dmax>0, maximum horizontal flooding distance
      • cdepth=0: critical depth for flooding; if depth<cdepth, no flooding
      • kk: passed through to pyroms.remapping.flood
      • mode='linear' or 'spline': type of interpolation

    Interpolate the variable from the z vertical grid grdz to the ROMS grid grd.
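A minimal sketch, assuming tempz is a 3-D field on the z levels of grdz (for example the output of roms2z above) and that grd and grdz already exist; the dmax and cdepth values are arbitrary illustrations:

    import pyroms

    def z_field_to_roms(tempz, grdz, grd):
        # flood masked cells horizontally before interpolating back to s levels
        return pyroms.remapping.z2roms(tempz, grdz, grd, Cpos='rho',
                                       flood=True, dmax=50, cdepth=5,
                                       mode='linear')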

-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms/docs/pyroms.remapping.z2roms-pysrc.html b/pyroms/docs/pyroms.remapping.z2roms-pysrc.html
deleted file mode 100644
index 5fb36c6..0000000
--- a/pyroms/docs/pyroms.remapping.z2roms-pysrc.html
+++ /dev/null
@@ -1,212 +0,0 @@
Package pyroms :: Package remapping :: Module z2roms

Source Code for Module pyroms.remapping.z2roms
# encoding: utf-8

import warnings

import numpy as np

import _interp
import _remapping

import pyroms


def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None,
           spval=1e37, flood=True, dmax=0, cdepth=0, kk=0,
           mode='linear'):
    """
    var = z2roms(var, grdz, grd)

    optional switch:
      - Cpos='rho', 'u' or 'v'       C-grid position where the variable lies
      - irange                       grid sub-sample for the i direction
      - jrange                       grid sub-sample for the j direction
      - spval=1e37                   special (fill) value
      - dmax=0                       if dmax>0, maximum horizontal flooding distance
      - cdepth=0                     critical depth for flooding;
                                     if depth<cdepth => no flooding
      - kk                           passed through to pyroms.remapping.flood
      - mode='linear' or 'spline'    type of interpolation

    Interpolate the variable from z vertical grid grdz to ROMS grid grd.
    """

    varz = varz.copy()

    assert len(varz.shape) == 3, 'var must be 3D'

    if mode == 'linear':
        imode = 0
    elif mode == 'spline':
        imode = 1
    else:
        imode = 0
        warnings.warn('%s not supported, defaulting to linear' % mode)

    # source levels, target depths and land mask at the requested C-grid position
    if Cpos == 'rho':
        z = grdz.vgrid.z[:]
        depth = grd.vgrid.z_r[0, :]
        mask = grd.hgrid.mask_rho
    elif Cpos == 'u':
        z = 0.5 * (grdz.vgrid.z[:, :, :-1] + grdz.vgrid.z[:, :, 1:])
        depth = 0.5 * (grd.vgrid.z_r[0, :, :, :-1] + grd.vgrid.z_r[0, :, :, 1:])
        mask = grd.hgrid.mask_u
    elif Cpos == 'v':
        z = 0.5 * (grdz.vgrid.z[:, :-1, :] + grdz.vgrid.z[:, 1:, :])
        depth = 0.5 * (grd.vgrid.z_r[0, :, :-1, :] + grd.vgrid.z_r[0, :, 1:, :])
        mask = grd.hgrid.mask_v
    elif Cpos == 'w':
        z = grdz.vgrid.z[:]
        depth = grd.vgrid.z_w[0, :]
        mask = grd.hgrid.mask_rho
    else:
        raise ValueError('%s bad position. Use depth at Arakawa-C rho points instead.' % Cpos)

    nlev, Mm, Lm = varz.shape
    Nm = depth.shape[0]

    if irange is None:
        irange = (0, Lm)
    else:
        assert varz.shape[2] == irange[1] - irange[0], \
            'var shape and irange must agree'

    if jrange is None:
        jrange = (0, Mm)
    else:
        assert varz.shape[1] == jrange[1] - jrange[0], \
            'var shape and jrange must agree'

    # flood varz if requested
    if flood is True:
        varz = pyroms.remapping.flood(varz, grdz, Cpos=Cpos,
                                      irange=irange, jrange=jrange, spval=spval,
                                      dmax=dmax, cdepth=cdepth, kk=kk)

    # pad one level below the bottom and one above the surface
    varz = np.concatenate((varz[0:1, :, :], varz, varz[-1:, :, :]), 0)
    z = np.concatenate((-9999 * np.ones((1, z.shape[1], z.shape[2])),
                        z,
                        100 * np.ones((1, z.shape[1], z.shape[2]))), 0)

    var = np.ma.zeros((Nm, Mm, Lm))

    # horizontal slice at each ROMS level
    for k in range(Nm):
        var[k, :, :] = _interp.xhslice(
            varz,
            z[:, jrange[0]:jrange[1], irange[0]:irange[1]],
            depth[k, jrange[0]:jrange[1], irange[0]:irange[1]],
            mask[jrange[0]:jrange[1], irange[0]:irange[1]],
            imode, spval)

    # mask the fill value
    var = np.ma.masked_values(var, spval, rtol=1e-5)
    #var[k,:,:] = np.ma.masked_where(mask == 0, var[k,:,:])

    return var
diff --git a/pyroms/docs/pyroms.tools-module.html b/pyroms/docs/pyroms.tools-module.html
deleted file mode 100644
index cb241c0..0000000
--- a/pyroms/docs/pyroms.tools-module.html
+++ /dev/null
@@ -1,744 +0,0 @@
Package pyroms :: Module tools

Module tools

source code

Functions

    zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear')
        zslice, lon, lat = zslice(var, depth, grd)

    sslice(var, sindex, grd, Cpos='rho', vert=False)
        sslice, lon, lat = sslice(var, sindex, grd)

    islice(var, iindex, grd, Cpos='rho', vert=False)
        islice, z, lon, lat = islice(var, iindex, grd)

    jslice(var, jindex, grd, Cpos='rho', vert=False)
        jslice, z, lon, lat = jslice(var, jindex, grd)

    isoslice(var, prop, isoval, grd, Cpos='rho', masking=True, vert=False)
        isoslice, lon, lat = isoslice(variable, property, isoval, grd)

    transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, spval=1e+37)
        transect, z, lon, lat = transect(var, istart, iend, jstart, jend, grd)

    lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e+37)
        lonslice, z, lon, lat = lonslice(var, longitude, grd)

    latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e+37)
        latslice, z, lon, lat = latslice(var, latitude, grd)

    section_transport(u, v, istart, iend, jstart, jend, grd)
        transpu, transpv = section_transport(u, v, istart, iend, jstart, jend, grd)

    interm_pt(pnear, pk, pai, pbi, paj, pbj)

Function Details

zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear')

source code

    zslice, lon, lat = zslice(var, depth, grd)

    optional switches:

      • Cpos='rho', 'u', 'v' or 'w': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices
      • mode='linear' or 'spline': type of interpolation

    Return a constant-z slice at depth depth from the 3D variable var.
    lon and lat contain the C-grid positions of the slice for plotting.
    If vert=True, lon and lat contain the positions of the vertices (to be used with pcolor).
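A minimal plotting sketch, assuming grd and a 3-D temp array already exist; the matplotlib usage is illustrative only:

    import matplotlib.pyplot as plt
    import pyroms

    def plot_temp_at_100m(temp, grd):
        # vert=True returns vertex positions, suitable for pcolor
        tempz, lon, lat = pyroms.tools.zslice(temp, 100, grd, Cpos='rho', vert=True)
        plt.pcolor(lon, lat, tempz)
        plt.colorbar()
        plt.show()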

sslice(var, sindex, grd, Cpos='rho', vert=False)

source code

    sslice, lon, lat = sslice(var, sindex, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices
      • mode='linear' or 'spline': type of interpolation

    Return a constant-s slice at index sindex from the 3D variable var.
    lon and lat contain the C-grid positions of the slice for plotting.
    If vert=True, lon and lat contain the positions of the vertices (to be used with pcolor).


islice(var, iindex, grd, Cpos='rho', vert=False)

source code

    islice, z, lon, lat = islice(var, iindex, grd)

    optional switches:

      • Cpos='rho', 'u', 'v' or 'w': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices

    Return a constant-i slice at index iindex from the 3D variable var.
    lon, lat and z contain the C-grid positions of the slice for plotting.
    If vert=True, lon, lat and z contain the positions of the vertices (to be used with pcolor).


jslice(var, jindex, grd, Cpos='rho', vert=False)

source code

    jslice, z, lon, lat = jslice(var, jindex, grd)

    optional switches:

      • Cpos='rho', 'u', 'v' or 'w': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices

    Return a constant-j slice at index jindex from the 3D variable var.
    lon, lat and z contain the C-grid positions of the slice for plotting.
    If vert=True, lon, lat and z contain the positions of the vertices (to be used with pcolor).
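A minimal sketch for the index slices, assuming grd and a 3-D temp array exist; the index value is an example only:

    import matplotlib.pyplot as plt
    import pyroms

    def plot_jslice(temp, grd, jindex=50):
        # constant-j vertical section with vertex positions for pcolor
        sec, z, lon, lat = pyroms.tools.jslice(temp, jindex, grd, Cpos='rho', vert=True)
        plt.pcolor(lon, z, sec)
        plt.colorbar()
        plt.show()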


isoslice(var, prop, isoval, grd, Cpos='rho', masking=True, vert=False)

source code

    isoslice, lon, lat = isoslice(variable, property, isoval, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • masking=True: mask the output if True
      • vert=True/False: if True, return the positions of the vertices
      • mode='linear' or 'spline': type of interpolation

    The result is a projection of variable at property == isoval in the first
    nonsingleton dimension. When there is more than one zero crossing, the
    results are averaged. lon and lat contain the C-grid positions of the slice
    for plotting. If vert=True, lon and lat contain the positions of the
    vertices (to be used with pcolor).

    EXAMPLE:
      s_at_m5  = isoslice(s, z, -5)   # s at z == -5
      h_at_s30 = isoslice(z, s, 30)   # z at s == 30


transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, spval=1e+37)

source code

    transect, z, lon, lat = transect(var, istart, iend, jstart, jend, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices
      • spval: special (fill) value
      • rtol: tolerance parameter

    Return a vertical transect between the points P1=(istart, jstart) and
    P2=(iend, jend) from the 3D variable var.
    lon, lat and z contain the C-grid positions of the section for plotting.
    If vert=True, lon, lat and z contain the positions of the vertices (to be used with pcolor).
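A minimal sketch, assuming grd and a 3-D salt array exist; the index pairs are examples only:

    import pyroms

    def salt_transect(salt, grd):
        # vertical section between P1=(istart, jstart)=(10, 20) and P2=(iend, jend)=(80, 60)
        sec, z, lon, lat = pyroms.tools.transect(salt, 10, 80, 20, 60, grd,
                                                 Cpos='rho', vert=False)
        return sec, z, lon, lat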


lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e+37)

source code

    lonslice, z, lon, lat = lonslice(var, longitude, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices
      • spval: special (fill) value
      • rtol: tolerance parameter

    Return a longitudinal slice along longitude=longitude from the 3D variable var.
    lon, lat and z contain the C-grid positions of the section for plotting.
    If vert=True, lon, lat and z contain the positions of the vertices (to be used with pcolor).


latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e+37)

source code

    latslice, z, lon, lat = latslice(var, latitude, grd)

    optional switches:

      • Cpos='rho', 'u' or 'v': the Arakawa C-grid position where the variable lies
      • vert=True/False: if True, return the positions of the vertices
      • spval: special (fill) value
      • rtol: tolerance parameter

    Return a latitudinal slice along latitude=latitude from the 3D variable var.
    lon, lat and z contain the C-grid positions of the section for plotting.
    If vert=True, lon, lat and z contain the positions of the vertices (to be used with pcolor).
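A minimal sketch, assuming grd and a 3-D salt array exist; the longitude and latitude values are examples only:

    import pyroms

    def fixed_coordinate_sections(salt, grd):
        lon_sec, z1, lon1, lat1 = pyroms.tools.lonslice(salt, -150.0, grd, Cpos='rho')
        lat_sec, z2, lon2, lat2 = pyroms.tools.latslice(salt, 58.5, grd, Cpos='rho')
        return lon_sec, lat_sec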


section_transport(u, v, istart, iend, jstart, jend, grd)

source code

    transpu, transpv = section_transport(u, v, istart, iend, jstart, jend, grd)

    Compute the transport through the section defined between the point
    P1=(istart, jstart) and P2=(iend, jend). P1 and P2 are Arakawa-C psi points.
    The transport is positive to the right-hand side of the section.
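A minimal sketch, assuming u, v and grd exist; the psi-point indices are examples only:

    import pyroms

    def strait_transport_sv(u, v, grd):
        # transport through the section from (istart, jstart)=(25, 10) to (iend, jend)=(25, 40)
        transpu, transpv = pyroms.tools.section_transport(u, v, 25, 25, 10, 40, grd)
        # convert m^3/s to Sverdrups
        return (transpu + transpv) * 1e-6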

diff --git a/pyroms/docs/pyroms.tools-pysrc.html b/pyroms/docs/pyroms.tools-pysrc.html
deleted file mode 100644
index 840a543..0000000
--- a/pyroms/docs/pyroms.tools-pysrc.html
+++ /dev/null
@@ -1,1461 +0,0 @@
Package pyroms :: Module tools

Source Code for Module pyroms.tools
    (deleted generated listing, roughly 1140 source lines: the definitions of
    zslice, sslice, islice, jslice, isoslice, transect, lonslice, latslice,
    section_transport and interm_pt documented on the pyroms.tools module page
    above)
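The source listing summarized above documents pyroms.tools.section_transport and its interm_pt helper. A minimal usage sketch, assuming a grid id 'YELLOW' registered in the file pointed to by PYROMS_GRIDID_FILE and a hypothetical history file name (neither comes from this diff):

    # rough sketch -- the grid id and the file name are hypothetical
    import netCDF4
    import pyroms

    grd = pyroms.grid.get_ROMS_grid('YELLOW')      # grid defined in gridid.txt
    nc = netCDF4.Dataset('ocean_his_0001.nc')      # hypothetical ROMS history file
    u = nc.variables['u'][0]                       # one time record, (N, eta_u, xi_u)
    v = nc.variables['v'][0]                       # one time record, (N, eta_v, xi_v)

    # transport through the section between psi points (10, 25) and (40, 25);
    # counted positive to the right-hand side of the section
    transpu, transpv = pyroms.tools.section_transport(u, v, 10, 40, 25, 25, grd)
    print('section transport [m3/s]:', transpu + transpv)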
diff --git a/pyroms/docs/pyroms.utility-module.html b/pyroms/docs/pyroms.utility-module.html
deleted file mode 100644
index 2dd760e..0000000
[generated epydoc summary page for the pyroms.utility module: one entry each for get_lonlat, get_ij, find_nearestgridpoints, get_grid_proj, get_nc_var, roms_varlist, get_bottom, get_surface, move2grid and get_date_tag, repeating their one-line docstrings]
diff --git a/pyroms/docs/pyroms.utility-pysrc.html b/pyroms/docs/pyroms.utility-pysrc.html
deleted file mode 100644
index 3f640cd..0000000
[generated epydoc page with the highlighted source listing of pyroms/pyroms/utility.py]
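Both utility pages summarized above cover, among other helpers, get_ij and get_lonlat, which translate between geographic coordinates and grid indices. A minimal round-trip sketch (the grid id is again a hypothetical placeholder):

    import pyroms

    grd = pyroms.grid.get_ROMS_grid('YELLOW')      # hypothetical grid id
    # index of the rho point nearest to (longitude, latitude) in degrees
    i, j = pyroms.utility.get_ij(123.5, 36.2, grd, Cpos='rho')
    # longitude/latitude actually stored at that grid point
    lon, lat = pyroms.utility.get_lonlat(i, j, grd, Cpos='rho')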
diff --git a/pyroms/docs/pyroms.vgrid-module.html b/pyroms/docs/pyroms.vgrid-module.html
deleted file mode 100644
index d6162c1..0000000
[generated epydoc summary page for the pyroms.vgrid module ("Various vertical coordinates; presently, only ocean s-coordinates are supported"): classes s_coordinate (original transformation, Vtransform=1), s_coordinate_2 (new transformation, Vtransform=2), z_r, z_w and z_coordinate, all indexable to return depths]
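The vgrid page summarized above lists the vertical-coordinate classes. A short sketch of the call pattern for obtaining depths, with made-up bathymetry and stretching parameters (none of the numbers come from this diff):

    import numpy as np
    import pyroms

    h = 50.0 + 150.0 * np.random.rand(60, 80)      # fake bathymetry at rho points [m]
    s = pyroms.vgrid.s_coordinate_2(h, theta_b=0.4, theta_s=5.0, Tcline=50.0, N=30)
    z = s.z_r[:]                                   # depths of rho points with zeta = 0
    print(z.shape)                                 # (30, 60, 80), values negative downward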
[the diff then deletes the remaining generated epydoc pages under pyroms/docs/:
    pyroms.vgrid-pysrc.html (highlighted source listing of pyroms/pyroms/vgrid.py);
    pyroms.vgrid.s_coordinate-class.html, pyroms.vgrid.s_coordinate_2-class.html,
    pyroms.vgrid.z_coordinate-class.html, pyroms.vgrid.z_r-class.html and
    pyroms.vgrid.z_w-class.html (class pages for the vertical-coordinate objects);
    redirect.html (epydoc auto-redirect page), toc-everything.html and toc.html;
    and the per-module toc-*-module.html pages for pyroms, pyroms.cf, pyroms.extern,
    pyroms.extern.greatcircle, pyroms.extern.kdtree, pyroms.extern.pupynere,
    pyroms.grid, pyroms.hgrid, pyroms.hgrid_old, pyroms.io, pyroms.remapping,
    pyroms.remapping.compute_remap_weights, pyroms.remapping.flood,
    pyroms.remapping.make_remap_grid_file, pyroms.remapping.remap,
    pyroms.remapping.roms2z, pyroms.remapping.test_remap_weights,
    pyroms.remapping.z2roms (each of the remapping pages exists both with and
    without a stray trailing apostrophe in the module name), pyroms.tools,
    pyroms.utility and pyroms.vgrid]
- [hide private] - - - - diff --git a/pyroms/external/csa/makefile b/pyroms/external/csa/makefile index 72674db..cb29d86 100644 --- a/pyroms/external/csa/makefile +++ b/pyroms/external/csa/makefile @@ -6,11 +6,11 @@ SHELL = /bin/sh #### Start of system configuration section. #### -prefix = /u1/uaf/kshedstrom/python +prefix = /home/kshedstrom/python exec_prefix = ${prefix} bindir = ${exec_prefix}/bin -libdir = /u1/uaf/kshedstrom/python/lib +libdir = /home/kshedstrom/python/lib includedir = ${prefix}/include INSTALLDIRS =\ @@ -27,8 +27,8 @@ INSTALL_DATA = ${INSTALL} -m 644 #### End of system configuration section. #### CC = gcc -CFLAGS = -I/u1/uaf/kshedstrom/python/include -LDFLAGS = -L$DESTDIR/lib +CFLAGS = -g -O2 -Wall -pedantic +LDFLAGS = AR = ar ARFLAGS = cru diff --git a/pyroms/external/gridgen/makefile b/pyroms/external/gridgen/makefile index ae57fce..95963f5 100644 --- a/pyroms/external/gridgen/makefile +++ b/pyroms/external/gridgen/makefile @@ -6,11 +6,11 @@ SHELL = /bin/sh #### Start of system configuration section. #### -prefix = /u1/uaf/kshedstrom/python +prefix = /home/kshedstrom/python exec_prefix = ${prefix} bindir = ${exec_prefix}/bin -libdir = /u1/uaf/kshedstrom/python/lib +libdir = /home/kshedstrom/python/lib includedir = ${prefix}/include INSTALLDIRS =\ @@ -24,18 +24,18 @@ INSTALL = /usr/bin/install -c INSTALL_PROGRAM = ${INSTALL} INSTALL_DATA = ${INSTALL} -m 644 -NOCOMPLEX = -HAVE_GRIDNODES_H = +NOCOMPLEX = +HAVE_GRIDNODES_H = CC = gcc -CFLAGS = -I/u1/uaf/kshedstrom/python/include -CFLAGS_TRIANGLE = -I/u1/uaf/kshedstrom/python/include -w -ffloat-store -LDFLAGS = -L$DESTDIR/lib +CFLAGS = -I/home/kshedstrom/python/include +CFLAGS_TRIANGLE = -I/home/kshedstrom/python/include -w -ffloat-store +LDFLAGS = -L/home/kshedstrom/python/lib AR = ar ARFLAGS = cru -LIBS = -lm +LIBS = -lgu -lm MLIB = -lm diff --git a/pyroms/external/gridutils/makefile b/pyroms/external/gridutils/makefile index 053ff82..c91fb29 100644 --- a/pyroms/external/gridutils/makefile +++ b/pyroms/external/gridutils/makefile @@ -7,11 +7,11 @@ SHELL = /bin/sh #### Start of system configuration section. #### -prefix = /u1/uaf/kshedstrom/python +prefix = /home/kshedstrom/python exec_prefix = ${prefix} bindir = ${exec_prefix}/bin -libdir = /u1/uaf/kshedstrom/python/lib +libdir = /home/kshedstrom/python/lib includedir = ${prefix}/include INSTALLDIRS =\ @@ -28,9 +28,9 @@ INSTALL_DATA = ${INSTALL} -m 644 #### End of system configuration section. 
#### CC = gcc -CFLAGS = -I/u1/uaf/kshedstrom/python/include -LDFLAGS = -L$DESTDIR/lib -CPPFLAGS = -I/u1/uaf/kshedstrom/python/include +CFLAGS = -I/home/kshedstrom/python/include +LDFLAGS = -L/home/kshedstrom/python/lib +CPPFLAGS = -I/home/kshedstrom/python/include AR = ar ARFLAGS = cru diff --git a/pyroms/external/nn/makefile b/pyroms/external/nn/makefile index 008c6b5..e21ba3a 100644 --- a/pyroms/external/nn/makefile +++ b/pyroms/external/nn/makefile @@ -1,9 +1,9 @@ SHELL = /bin/sh -prefix = /u1/uaf/kshedstrom/python +prefix = /home/kshedstrom/python exec_prefix = ${prefix} bindir = ${exec_prefix}/bin -libdir = /u1/uaf/kshedstrom/python/lib +libdir = /home/kshedstrom/python/lib includedir = ${prefix}/include INSTALLDIRS =\ @@ -18,10 +18,10 @@ INSTALL_PROGRAM = ${INSTALL} INSTALL_DATA = ${INSTALL} -m 644 CC = gcc -CFLAGS = -I/u1/uaf/kshedstrom/python/include -CFLAGS_TRIANGLE = -I/u1/uaf/kshedstrom/python/include -w -ffloat-store +CFLAGS = -g -O2 -Wall -pedantic +CFLAGS_TRIANGLE = -g -O2 -Wall -pedantic -w -ffloat-store CFLAGS_VULNERABLE = -ffloat-store -LDFLAGS = -L$DESTDIR/lib +LDFLAGS = AR = ar ARFLAGS = cru diff --git a/pyroms/external/scrip/source/makefile b/pyroms/external/scrip/source/makefile index 4a2f85c..3336cc1 100644 --- a/pyroms/external/scrip/source/makefile +++ b/pyroms/external/scrip/source/makefile @@ -9,7 +9,7 @@ # SVN $URL$ #=============================================================================== -NC_CONFIG = nc-config +NC_CONFIG = nf-config LIBDIR = $(shell $(NC_CONFIG) --prefix)/lib INCDIR = $(shell $(NC_CONFIG) --prefix)/include SRCDIR = . @@ -125,7 +125,7 @@ remap_read.o: $(SRCDIR)/remap_read.f kinds_mod.o constants.o netcdf.o \ remap_vars.o grids.o $(COMPILE) $(FLAGS) $(INCLUDE) -c $(SRCDIR)/remap_read.f -remap.o: $(SRCDIR)/remap.f kinds_mod.o constants.o +remap.o: $(SRCDIR)/remap.f kinds_mod.o constants.o $(COMPILE) $(FLAGS) -c $(SRCDIR)/remap.f scrip.o: $(SRCDIR)/scrip.f kinds_mod.o constants.o iounits.o timers.o \ @@ -152,7 +152,7 @@ $(SRCDIR)/scrip.so: $(SRCDIR)/pyscrip.f90 $(OBJF2PY) install: /bin/cp $(SRCDIR)/scrip $(PREFIX)/bin /bin/cp $(SRCDIR)/scrip_test $(PREFIX)/bin - /bin/cp $(SRCDIR)/scrip.so $(PREFIX)/lib + /bin/cp $(SRCDIR)/scrip*.so $(PREFIX)/lib -clean: +clean: /bin/rm *.o *.mod *.so scrip scrip_test diff --git a/pyroms/install_pyroms.sh b/pyroms/install_pyroms.sh index ce50074..91e7aae 100755 --- a/pyroms/install_pyroms.sh +++ b/pyroms/install_pyroms.sh @@ -1,13 +1,9 @@ #!/bin/sh #DESTDIR=/usr/local -DESTDIR=/u1/uaf/kshedstrom/python +DESTDIR=$HOME/python +PYROMS_PATH=$DESTDIR/lib/python3.6/site-packages/pyroms CURDIR=`pwd` -export CPPFLAGS=-I$DESTDIR/include -export LDFLAGS='-L$DESTDIR/lib' -#export LDFLAGS='-L$DESTDIR/lib -L/usr/local/pkg/python/python-2.7.2/lib -shared' -export CFLAGS=-I$DESTDIR/include -export SHLIBS=-L$DESTDIR/lib echo echo "installing pyroms..." 
@@ -23,24 +19,27 @@ cd $CURDIR/external/csa ./configure --prefix=$DESTDIR make install cd $CURDIR/external/gridutils -./configure --prefix=$DESTDIR +./configure CPPFLAGS=-I$DESTDIR/include LDFLAGS=-L$DESTDIR/lib CFLAGS=-I$DESTDIR/include --prefix=$DESTDIR make install cd $CURDIR/external/gridgen -./configure --prefix=$DESTDIR +export SHLIBS=-L$DESTDIR/lib +./configure CPPFLAGS=-I$DESTDIR/include LDFLAGS=-L$DESTDIR/lib CFLAGS=-I$DESTDIR/include --prefix=$DESTDIR make make lib make shlib make install -PYROMS_PATH=`python -c 'import pyroms ; print pyroms.__path__[0]'` +# Now setting this above because this gave me an error: +#PYROMS_PATH=`python -c 'import pyroms ; print pyroms.__path__[0]'` +# $ echo $PYROMS_PATH cp libgridgen.so $PYROMS_PATH -#cp $LOCALDIR/lib/libgridgen.so $PYROMS_PATH echo "installing scrip..." cd $CURDIR/external/scrip/source perl -pe "s#\/usr\/local#$DESTDIR#" makefile > makefile2 make -f makefile2 make -f makefile2 f2py make -f makefile2 install -cp scrip.so $PYROMS_PATH ++# Write it this way for Darwin... +cp -r scrip*.so* $PYROMS_PATH cd $CURDIR echo echo "Done installing pyroms..." diff --git a/pyroms/pyroms/__init__.py b/pyroms/pyroms/__init__.py index b18fb4e..83b61b9 100644 --- a/pyroms/pyroms/__init__.py +++ b/pyroms/pyroms/__init__.py @@ -1,25 +1,25 @@ # encoding: utf-8 -''' +''' PYROMS is a toolkit for working with ROMS ocean models -pyroms is based on the python/numpy/matplotlib scientific python suite. -NetCDF I/O is based on the NetCDF4-python package. The toolkit contains -general modeling tools for dealing with arrays, diagnosing standard +pyroms is based on the python/numpy/matplotlib scientific python suite. +NetCDF I/O is based on the NetCDF4-python package. The toolkit contains +general modeling tools for dealing with arrays, diagnosing standard properties, curvilinear grid generation, and interpolation. ''' -import cf -import vgrid -import extern -import hgrid -import grid -import io -import sta_hgrid -import sta_grid -import tools -import remapping -import utility +from . import cf +from . import vgrid +from . import extern +from . import hgrid +from . import grid +from . import io +from . import sta_hgrid +from . import sta_grid +from . import tools +from . import remapping +from . import utility __authors__ = ['Frederic Castruccio (frederic@marine.rutgers.edu)'] - + __version__ = '0.1.0' diff --git a/pyroms/pyroms/cf.py b/pyroms/pyroms/cf.py index ea9e3c8..472e122 100644 --- a/pyroms/pyroms/cf.py +++ b/pyroms/pyroms/cf.py @@ -15,13 +15,16 @@ import numpy as np -import netcdftime +try: + import cftime +except: + import netcdftime import pyroms.io class time (np.ndarray): """Return time object from netCDF file - + Parameters ---------- nc : netCDF3/4 object or filename @@ -33,15 +36,15 @@ class time (np.ndarray): calendar : string, optional A string representing the calandar to use. See netcdftime documentation for possible values. - + Returns ------- nctime : ndarray A subclass of numpy.ndarray with values equal to the time variable in the netCDF file referenced with nc. - + """ - + _unit2sec={'seconds' : 1.0, 'minutes' : 60.0, 'hours' : 3600.0, @@ -62,52 +65,52 @@ def __new__(self, ncfile, name='time', units=None, calendar='standard'): units = self._nc.variables[name].units data.utime = netcdftime.utime(units, calendar=calendar) return data - + def __array_finalize__(self, obj): self.utime = getattr(obj, 'utime', {}) - + def arg_nearest_date(self, dateo): """Return index of date nearest to query date. 
- + Prameters --------- dateo : datetime object The query date - + Returns ------- idx : integer The index of the date closest to dateo. If two dates are equidistant, the smaller is returned. - + """ to = self.utime.date2num(dateo) return np.min(np.where(np.abs(self-to) == \ np.min(np.abs(self-to)))[0]) - + def nearest_date(self, dateo): """Return the nearest date to query date. - + Prameters --------- dateo : datetime object The query date - + Returns ------- nearest_date : datetime object A datetime object of the date closest to dateo. If two dates are equidistant, the smaller is returned. - + """ idx = np.where(np.abs(self.dates-dateo) == \ np.min(np.abs(self.dates-dateo)))[0] idx = np.min(idx) return self.dates[idx] - + def arg_nearest(self, to, units=None): """Return index of time nearest to query time. - + Prameters --------- to : float @@ -115,21 +118,21 @@ def arg_nearest(self, to, units=None): units : string, optional The units of the reference time. Defaults to the reference time string 'units' in the netcdf oject. - + Returns ------- idx : integer The index of the date closest to to. If two times are equidistant, the smaller is returned. - + """ if units is not None: to *= self._unit2sec[units] * self._sec2unit[self.utime.units] return np.min(np.where(np.abs(self-to) == np.min(np.abs(self-to)))[0]) - + def nearest(self, to, units=None): """Return time nearest to time query. - + Prameters --------- to : float @@ -137,24 +140,24 @@ def nearest(self, to, units=None): units : string, optional The units of the reference time. Defaults to the reference time string 'units' in the netcdf oject. - + Returns ------- idx : integer The index of the date closest to to. If two times are equidistant, the smaller is returned. - + """ if units is not None: to *= self._unit2sec[units] * self._sec2unit[self.utime.units] idx = np.where(np.abs(self-to) == np.min(np.abs(self-to)))[0] idx = np.min(idx) return self[idx] - + def get_seconds(self): fac = self._unit2sec[self.utime.units] * self._sec2unit['seconds'] return self*fac - + def get_minutes(self): fac = self._unit2sec[self.utime.units] * self._sec2unit['minutes'] return self*fac @@ -162,11 +165,11 @@ def get_minutes(self): def get_hours(self): fac = self._unit2sec[self.utime.units] * self._sec2unit['hours'] return self*fac - + def get_days(self): fac = self._unit2sec[self.utime.units] * self._sec2unit['days'] return np.asarray(self,dtype='float64')*fac - + def get_jd(self): utime = netcdftime.utime('days since 0001-01-01 00:00:00', \ calendar='proleptic_gregorian') @@ -174,7 +177,7 @@ def get_jd(self): def get_dates(self): return np.array([self.utime.num2date(tval) for tval in self]) - + jd = property(get_jd, None, doc="Julian day, for plotting in pylab") seconds = property(get_seconds, None, doc="seconds") minutes = property(get_minutes, None, doc="minutes") diff --git a/pyroms/pyroms/extern/__init__.py b/pyroms/pyroms/extern/__init__.py index 232d736..602f809 100644 --- a/pyroms/pyroms/extern/__init__.py +++ b/pyroms/pyroms/extern/__init__.py @@ -1,10 +1,10 @@ 'External packages' # from an old version Jeff Whitaker's Basemap -from greatcircle import GreatCircle +from .greatcircle import GreatCircle # from Roberto De Almeida -import pupynere +from . import pupynere # from Anne M. 
Archibald's scipy.spatial.kdtree pure python code -from kdtree import KDTree \ No newline at end of file +from .kdtree import KDTree \ No newline at end of file diff --git a/pyroms/pyroms/extern/greatcircle.py b/pyroms/pyroms/extern/greatcircle.py index c6f2037..81a958f 100644 --- a/pyroms/pyroms/extern/greatcircle.py +++ b/pyroms/pyroms/extern/greatcircle.py @@ -6,7 +6,7 @@ class GreatCircle(object): """ formula for perfect sphere from Ed Williams' 'Aviation Formulary' (http://williams.best.vwh.net/avform.htm) - + code for ellipsoid posted to GMT mailing list by Jim Leven in Dec 1999 Contact: Jeff Whitaker @@ -40,7 +40,7 @@ def __init__(self,rmajor,rminor,lon1,lat1,lon2,lat2): self.lon1 = lon1 self.lon2 = lon2 # distance along geodesic in meters. - d,a12,a21 = vinc_dist(self.f, self.a, lat1, lon1, lat2, lon2 ) + d,a12,a21 = vinc_dist(self.f, self.a, lat1, lon1, lat2, lon2 ) self.distance = d self.azimuth12 = a12 self.azimuth21 = a21 @@ -69,13 +69,13 @@ def points(self,npoints): """ # must ask for at least 2 points. if npoints <= 1: - raise ValueError,'npoints must be greater than 1' + raise ValueError('npoints must be greater than 1') elif npoints == 2: return [math.degrees(self.lon1),math.degrees(self.lon2)],[math.degrees(self.lat1),math.degrees(self.lat2)] # can't do it if endpoints are antipodal, since # route is undefined. if self.antipodal: - raise ValueError,'cannot compute intermediate points on a great circle whose endpoints are antipodal' + raise ValueError('cannot compute intermediate points on a great circle whose endpoints are antipodal') d = self.gcarclen delta = 1.0/(npoints-1) f = delta*NX.arange(npoints) # f=0 is point 1, f=1 is point 2. @@ -93,8 +93,8 @@ def points(self,npoints): z = A*math.sin(lat1) +B*math.sin(lat2) lats=NX.arctan2(z,NX.sqrt(x**2+y**2)) lons=NX.arctan2(y,x) - lons = map(math.degrees,lons.tolist()) - lats = map(math.degrees,lats.tolist()) + lons = list(map(math.degrees,lons.tolist())) + lats = list(map(math.degrees,lats.tolist())) # use ellipsoid formulas else: latpt = self.lat1 @@ -103,8 +103,8 @@ def points(self,npoints): lons = [math.degrees(lonpt)] lats = [math.degrees(latpt)] for n in range(npoints-2): - latptnew,lonptnew,alpha21=vinc_pt(self.f,self.a,latpt,lonpt,azimuth,incdist) - d,azimuth,a21=vinc_dist(self.f,self.a,latptnew,lonptnew,lat2,lon2) + latptnew,lonptnew,alpha21=vinc_pt(self.f,self.a,latpt,lonpt,azimuth,incdist) + d,azimuth,a21=vinc_dist(self.f,self.a,latptnew,lonptnew,lat2,lon2) lats.append(math.degrees(latptnew)) lons.append(math.degrees(lonptnew)) latpt = latptnew; lonpt = lonptnew @@ -112,13 +112,13 @@ def points(self,npoints): lats.append(math.degrees(self.lat2)) return lons,lats # -# --------------------------------------------------------------------- +# --------------------------------------------------------------------- # | | # | geodetic.py - a collection of geodetic functions | # | | -# --------------------------------------------------------------------- -# -# +# --------------------------------------------------------------------- +# +# # ---------------------------------------------------------------------- # | Algrothims from Geocentric Datum of Australia Technical Manual | # | | @@ -152,14 +152,14 @@ def points(self,npoints): # | Calculate: the ellipsoidal distance (s) and | # | forward and reverse azimuths between the points (alpha12, alpha21). 
| # | | -# ---------------------------------------------------------------------- +# ---------------------------------------------------------------------- def vinc_dist( f, a, phi1, lembda1, phi2, lembda2 ) : - """ + """ Returns the distance between two geographic points on the ellipsoid and the forward and reverse azimuths between these points. - lats, longs and azimuths are in radians, distance in metres + lats, longs and azimuths are in radians, distance in metres Returns ( s, alpha12, alpha21 ) as a tuple @@ -167,14 +167,14 @@ def vinc_dist( f, a, phi1, lembda1, phi2, lembda2 ) : if (abs( phi2 - phi1 ) < 1e-8) and ( abs( lembda2 - lembda1) < 1e-8 ) : return 0.0, 0.0, 0.0 - + two_pi = 2.0*math.pi b = a * (1.0 - f) TanU1 = (1-f) * math.tan( phi1 ) TanU2 = (1-f) * math.tan( phi2 ) - + U1 = math.atan(TanU1) U2 = math.atan(TanU2) @@ -182,67 +182,67 @@ def vinc_dist( f, a, phi1, lembda1, phi2, lembda2 ) : last_lembda = -4000000.0 # an impossibe value omega = lembda - # Iterate the following equations, - # until there is no significant change in lembda - + # Iterate the following equations, + # until there is no significant change in lembda + while ( last_lembda < -3000000.0 or lembda != 0 and abs( (last_lembda - lembda)/lembda) > 1.0e-9 ) : - + sqr_sin_sigma = pow( math.cos(U2) * math.sin(lembda), 2) + \ pow( (math.cos(U1) * math.sin(U2) - \ math.sin(U1) * math.cos(U2) * math.cos(lembda) ), 2 ) Sin_sigma = math.sqrt( sqr_sin_sigma ) - + Cos_sigma = math.sin(U1) * math.sin(U2) + math.cos(U1) * math.cos(U2) * math.cos(lembda) - + sigma = math.atan2( Sin_sigma, Cos_sigma ) Sin_alpha = math.cos(U1) * math.cos(U2) * math.sin(lembda) / math.sin(sigma) alpha = math.asin( Sin_alpha ) - + Cos2sigma_m = math.cos(sigma) - (2 * math.sin(U1) * math.sin(U2) / pow(math.cos(alpha), 2) ) - + C = (f/16) * pow(math.cos(alpha), 2) * (4 + f * (4 - 3 * pow(math.cos(alpha), 2))) - + last_lembda = lembda - + lembda = omega + (1-C) * f * math.sin(alpha) * (sigma + C * math.sin(sigma) * \ (Cos2sigma_m + C * math.cos(sigma) * (-1 + 2 * pow(Cos2sigma_m, 2) ))) - + u2 = pow(math.cos(alpha),2) * (a*a-b*b) / (b*b) - + A = 1 + (u2/16384) * (4096 + u2 * (-768 + u2 * (320 - 175 * u2))) - + B = (u2/1024) * (256 + u2 * (-128+ u2 * (74 - 47 * u2))) - + delta_sigma = B * Sin_sigma * (Cos2sigma_m + (B/4) * \ (Cos_sigma * (-1 + 2 * pow(Cos2sigma_m, 2) ) - \ (B/6) * Cos2sigma_m * (-3 + 4 * sqr_sin_sigma) * \ (-3 + 4 * pow(Cos2sigma_m,2 ) ))) - + s = b * A * (sigma - delta_sigma) - + alpha12 = math.atan2( (math.cos(U2) * math.sin(lembda)), \ (math.cos(U1) * math.sin(U2) - math.sin(U1) * math.cos(U2) * math.cos(lembda))) - + alpha21 = math.atan2( (math.cos(U1) * math.sin(lembda)), \ (-math.sin(U1) * math.cos(U2) + math.cos(U1) * math.sin(U2) * math.cos(lembda))) - if ( alpha12 < 0.0 ) : + if ( alpha12 < 0.0 ) : alpha12 = alpha12 + two_pi - if ( alpha12 > two_pi ) : + if ( alpha12 > two_pi ) : alpha12 = alpha12 - two_pi alpha21 = alpha21 + two_pi / 2.0 - if ( alpha21 < 0.0 ) : + if ( alpha21 < 0.0 ) : alpha21 = alpha21 + two_pi - if ( alpha21 > two_pi ) : + if ( alpha21 > two_pi ) : alpha21 = alpha21 - two_pi - return s, alpha12, alpha21 + return s, alpha12, alpha21 - # END of Vincenty's Inverse formulae + # END of Vincenty's Inverse formulae #---------------------------------------------------------------------------- @@ -263,19 +263,19 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : given a reference point and a distance and azimuth to project. 
lats, longs and azimuths are passed in decimal degrees - Returns ( phi2, lambda2, alpha21 ) as a tuple + Returns ( phi2, lambda2, alpha21 ) as a tuple """ two_pi = 2.0*math.pi - if ( alpha12 < 0.0 ) : + if ( alpha12 < 0.0 ) : alpha12 = alpha12 + two_pi - if ( alpha12 > two_pi ) : + if ( alpha12 > two_pi ) : alpha12 = alpha12 - two_pi - + b = a * (1.0 - f) TanU1 = (1-f) * math.tan(phi1) @@ -283,53 +283,53 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : sigma1 = math.atan2( TanU1, math.cos(alpha12) ) Sinalpha = math.cos(U1) * math.sin(alpha12) cosalpha_sq = 1.0 - Sinalpha * Sinalpha - + u2 = cosalpha_sq * (a * a - b * b ) / (b * b) A = 1.0 + (u2 / 16384) * (4096 + u2 * (-768 + u2 * \ (320 - 175 * u2) ) ) B = (u2 / 1024) * (256 + u2 * (-128 + u2 * (74 - 47 * u2) ) ) - + # Starting with the approximation sigma = (s / (b * A)) last_sigma = 2.0 * sigma + 2.0 # something impossible - - # Iterate the following three equations - # until there is no significant change in sigma + + # Iterate the following three equations + # until there is no significant change in sigma # two_sigma_m , delta_sigma while ( abs( (last_sigma - sigma) / sigma) > 1.0e-9 ) : two_sigma_m = 2 * sigma1 + sigma - + delta_sigma = B * math.sin(sigma) * ( math.cos(two_sigma_m) \ + (B/4) * (math.cos(sigma) * \ (-1 + 2 * math.pow( math.cos(two_sigma_m), 2 ) - \ (B/6) * math.cos(two_sigma_m) * \ (-3 + 4 * math.pow(math.sin(sigma), 2 )) * \ (-3 + 4 * math.pow( math.cos (two_sigma_m), 2 ))))) \ - + last_sigma = sigma sigma = (s / (b * A)) + delta_sigma - - + + phi2 = math.atan2 ( (math.sin(U1) * math.cos(sigma) + math.cos(U1) * math.sin(sigma) * math.cos(alpha12) ), \ ((1-f) * math.sqrt( math.pow(Sinalpha, 2) + \ pow(math.sin(U1) * math.sin(sigma) - math.cos(U1) * math.cos(sigma) * math.cos(alpha12), 2)))) - + lembda = math.atan2( (math.sin(sigma) * math.sin(alpha12 )), (math.cos(U1) * math.cos(sigma) - \ math.sin(U1) * math.sin(sigma) * math.cos(alpha12))) - + C = (f/16) * cosalpha_sq * (4 + f * (4 - 3 * cosalpha_sq )) - + omega = lembda - (1-C) * f * Sinalpha * \ (sigma + C * math.sin(sigma) * (math.cos(two_sigma_m) + \ C * math.cos(sigma) * (-1 + 2 * math.pow(math.cos(two_sigma_m),2) ))) - + lembda2 = lembda1 + omega - + alpha21 = math.atan2 ( Sinalpha, (-math.sin(U1) * math.sin(sigma) + \ math.cos(U1) * math.cos(sigma) * math.cos(alpha12))) @@ -340,39 +340,39 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : alpha21 = alpha21 - two_pi - return phi2, lembda2, alpha21 + return phi2, lembda2, alpha21 # END of Vincenty's Direct formulae ##--------------------------------------------------------------------------- -# Notes: -# -# * "The inverse formulae may give no solution over a line -# between two nearly antipodal points. This will occur when +# Notes: +# +# * "The inverse formulae may give no solution over a line +# between two nearly antipodal points. This will occur when # lembda ... is greater than pi in absolute value". (Vincenty, 1975) -# -# * In Vincenty (1975) L is used for the difference in longitude, -# however for consistency with other formulae in this Manual, -# omega is used here. -# -# * Variables specific to Vincenty's formulae are shown below, -# others common throughout the manual are shown in the Glossary. -# -# +# +# * In Vincenty (1975) L is used for the difference in longitude, +# however for consistency with other formulae in this Manual, +# omega is used here. +# +# * Variables specific to Vincenty's formulae are shown below, +# others common throughout the manual are shown in the Glossary. 
+# +# # alpha = Azimuth of the geodesic at the equator # U = Reduced latitude -# lembda = Difference in longitude on an auxiliary sphere (lembda1 & lembda2 +# lembda = Difference in longitude on an auxiliary sphere (lembda1 & lembda2 # are the geodetic longitudes of points 1 & 2) # sigma = Angular distance on a sphere, from point 1 to point 2 # sigma1 = Angular distance on a sphere, from the equator to point 1 # sigma2 = Angular distance on a sphere, from the equator to point 2 -# sigma_m = Angular distance on a sphere, from the equator to the +# sigma_m = Angular distance on a sphere, from the equator to the # midpoint of the line from point 1 to point 2 # u, A, B, C = Internal variables -# -# +# +# # Sample Data -# +# # Flinders Peak # -37o57'03.72030" # 144o25'29.52440" @@ -381,14 +381,14 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : # 143o55'35.38390" # Ellipsoidal Distance # 54,972.271 m -# +# # Forward Azimuth # 306o52'05.37" -# +# # Reverse Azimuth # 127o10'25.07" -# -# +# +# ##******************************************************************* # Test driver @@ -401,51 +401,51 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : b = 6356752.3142 f = (a-b)/a - print "\n Ellipsoidal major axis = %12.3f metres\n" % ( a ) - print "\n Inverse flattening = %15.9f\n" % ( 1.0/f ) + print("\n Ellipsoidal major axis = %12.3f metres\n" % ( a )) + print("\n Inverse flattening = %15.9f\n" % ( 1.0/f )) - print "\n Test Flinders Peak to Buninyon" - print "\n ****************************** \n" + print("\n Test Flinders Peak to Buninyon") + print("\n ****************************** \n") phi1 = -(( 3.7203 / 60. + 57) / 60. + 37 ) lembda1 = ( 29.5244 / 60. + 25) / 60. + 144 - print "\n Flinders Peak = %12.6f, %13.6f \n" % ( phi1, lembda1 ) + print("\n Flinders Peak = %12.6f, %13.6f \n" % ( phi1, lembda1 )) deg = int(phi1) minn = int(abs( ( phi1 - deg) * 60.0 )) sec = abs(phi1 * 3600 - deg * 3600) - minn * 60 - print " Flinders Peak = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), + print(" Flinders Peak = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec )) deg = int(lembda1) minn = int(abs( ( lembda1 - deg) * 60.0 )) sec = abs(lembda1 * 3600 - deg * 3600) - minn * 60 - print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec ) + print(" %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec )) phi2 = -(( 10.1561 / 60. + 39) / 60. + 37 ) lembda2 = ( 35.3839 / 60. + 55) / 60. 
+ 143 - print "\n Buninyon = %12.6f, %13.6f \n" % ( phi2, lembda2 ) + print("\n Buninyon = %12.6f, %13.6f \n" % ( phi2, lembda2 )) deg = int(phi2) minn = int(abs( ( phi2 - deg) * 60.0 )) sec = abs(phi2 * 3600 - deg * 3600) - minn * 60 - print " Buninyon = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), + print(" Buninyon = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec )) deg = int(lembda2) minn = int(abs( ( lembda2 - deg) * 60.0 )) sec = abs(lembda2 * 3600 - deg * 3600) - minn * 60 - print " %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec ) + print(" %3i\xF8%3i\' %6.3f\" \n" % ( deg, minn, sec )) dist, alpha12, alpha21 = vinc_dist ( f, a, math.radians(phi1), math.radians(lembda1), math.radians(phi2), math.radians(lembda2) ) alpha12 = math.degrees(alpha12) alpha21 = math.degrees(alpha21) - print "\n Ellipsoidal Distance = %15.3f metres\n should be 54972.271 m\n" % ( dist ) - print "\n Forward and back azimuths = %15.6f, %15.6f \n" % ( alpha12, alpha21 ) + print("\n Ellipsoidal Distance = %15.3f metres\n should be 54972.271 m\n" % ( dist )) + print("\n Forward and back azimuths = %15.6f, %15.6f \n" % ( alpha12, alpha21 )) deg = int(alpha12) minn =int( abs(( alpha12 - deg) * 60.0 ) ) sec = abs(alpha12 * 3600 - deg * 3600) - minn * 60 - print " Forward azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) + print(" Forward azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec )) deg = int(alpha21) minn =int(abs( ( alpha21 - deg) * 60.0 )) sec = abs(alpha21 * 3600 - deg * 3600) - minn * 60 - print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) + print(" Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec )) # Test the direct function */ @@ -462,21 +462,21 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : lembda2 = math.degrees(lembda2) alpha21 = math.degrees(alpha21) - print "\n Projected point =%11.6f, %13.6f \n" % ( phi2, lembda2 ) + print("\n Projected point =%11.6f, %13.6f \n" % ( phi2, lembda2 )) deg = int(phi2) minn =int(abs( ( phi2 - deg) * 60.0 )) sec = abs( phi2 * 3600 - deg * 3600) - minn * 60 - print " Projected Point = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec ), + print(" Projected Point = %3i\xF8%3i\' %6.3f\", " % ( deg, minn, sec )) deg = int(lembda2) minn =int(abs( ( lembda2 - deg) * 60.0 )) sec = abs(lembda2 * 3600 - deg * 3600) - minn * 60 - print " %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec ) - print " Should be Buninyon \n" - print "\n Reverse azimuth = %10.6f \n" % ( alpha21 ) + print(" %3i\xF8%3i\' %6.3f\"\n" % ( deg, minn, sec )) + print(" Should be Buninyon \n") + print("\n Reverse azimuth = %10.6f \n" % ( alpha21 )) deg = int(alpha21) minn =int(abs( ( alpha21 - deg) * 60.0 )) sec = abs(alpha21 * 3600 - deg * 3600) - minn * 60 - print " Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n\n" % ( deg, minn, sec ) + print(" Reverse azimuth = %3i\xF8%3i\' %6.3f\"\n\n" % ( deg, minn, sec )) # lat/lon of New York lat1 = 40.78 @@ -484,16 +484,16 @@ def vinc_pt( f, a, phi1, lembda1, alpha12, s ) : # lat/lon of London. 
lat2 = 51.53 lon2 = 0.08 - print 'New York to London:' + print('New York to London:') gc = GreatCircle((2*a+b)/3.,(2*a+b)/3.,lon1,lat1,lon2,lat2) - print 'geodesic distance using a sphere with WGS84 mean radius = ',gc.distance - print 'lon/lat for 10 equally spaced points along geodesic:' + print('geodesic distance using a sphere with WGS84 mean radius = ',gc.distance) + print('lon/lat for 10 equally spaced points along geodesic:') lons,lats = gc.points(10) for lon,lat in zip(lons,lats): - print lon,lat + print(lon,lat) gc = GreatCircle(a,b,lon1,lat1,lon2,lat2) - print 'geodesic distance using WGS84 ellipsoid = ',gc.distance - print 'lon/lat for 10 equally spaced points along geodesic:' + print('geodesic distance using WGS84 ellipsoid = ',gc.distance) + print('lon/lat for 10 equally spaced points along geodesic:') lons,lats = gc.points(10) for lon,lat in zip(lons,lats): - print lon,lat + print(lon,lat) diff --git a/pyroms/pyroms/extern/kdtree.py b/pyroms/pyroms/extern/kdtree.py index 05350ef..eabc28f 100644 --- a/pyroms/pyroms/extern/kdtree.py +++ b/pyroms/pyroms/extern/kdtree.py @@ -40,7 +40,7 @@ def __init__(self, maxes, mins): self.m, = self.maxes.shape def __repr__(self): - return "" % zip(self.mins, self.maxes) + return "" % list(zip(self.mins, self.maxes)) def volume(self): """Total volume.""" diff --git a/pyroms/pyroms/grid.py b/pyroms/pyroms/grid.py index eb1bc66..32a1a45 100644 --- a/pyroms/pyroms/grid.py +++ b/pyroms/pyroms/grid.py @@ -43,10 +43,10 @@ class ROMS_gridinfo(object): There are two ways to define the grid information. If grid_file and hist_file are not passed to the object when it is created, the - information is retrieved from gridid.txt. - To add new grid please edit your gridid.txt. You need to define - an environment variable PYROMS_GRIDID_FILE pointing to your - gridid.txt file. Just copy an existing grid and modify the + information is retrieved from gridid.txt. + To add new grid please edit your gridid.txt. You need to define + an environment variable PYROMS_GRIDID_FILE pointing to your + gridid.txt file. Just copy an existing grid and modify the definition accordingly to your case (Be carefull with space and blank line). @@ -62,14 +62,14 @@ def __init__(self, gridid,grid_file=None,hist_file=None): if gridid in gridid_dictionary: #print 'CJMP> gridid found in gridid_dictionary, grid retrieved from dictionary' saved_self=gridid_dictionary[gridid] - for attrib in saved_self.__dict__.keys(): + for attrib in list(saved_self.__dict__.keys()): setattr(self,attrib,getattr(saved_self,attrib)) else: #nope, we need to get the information from gridid.txt or from #the grid and history files from the model self.id = gridid self._get_grid_info(grid_file,hist_file) - + #now save the data in the dictionary, so we don't need to get it again gridid_dictionary[gridid]=self @@ -107,7 +107,7 @@ def _get_grid_info(self,grid_file,hist_file): line_nb = line_nb + 1 if info == []: - raise ValueError, 'Unknow gridid. Please check your gridid.txt file' + raise ValueError('Unknown gridid. Please check your gridid.txt file') if info[4] == 'roms': self.name = info[1] @@ -119,7 +119,7 @@ def _get_grid_info(self,grid_file,hist_file): self.theta_b = np.float(info[7]) self.Tcline = np.float(info[8]) - elif info[4] == 'z': + elif info[4] == 'z': nline = len(info) dep = info[5] for line in range(6,nline): @@ -133,14 +133,14 @@ def _get_grid_info(self,grid_file,hist_file): self.depth = dep else: - raise ValueError, 'Unknow grid type. 
Please check your gridid.txt file' + raise ValueError('Unknown grid type. Please check your gridid.txt file') else: #lets get the grid information from the history and grid files #print 'CJMP> getting grid info from ROMS history and grid files' assert type(grid_file)!=type(None), 'if specify history file you must specify grid file' assert type(hist_file)!=type(None), 'if specify grid file you must specify history file' - #open history file and get necessary grid information from it. + #open history file and get necessary grid information from it. hist=netCDF.Dataset(hist_file,'r') #put data into ROMS_gridinfo object @@ -159,12 +159,12 @@ def _get_grid_info(self,grid_file,hist_file): try: self.Vtrans=np.float(hist.variables['Vstretching'][:]) except: - print 'variable Vtransform not found in history file. Defaulting to Vtrans=1' + print('variable Vtransform not found in history file. Defaulting to Vtrans=1') self.Vtrans=1 self.theta_s=np.float(hist.variables['theta_s'][:]) self.theta_b=np.float(hist.variables['theta_b'][:]) self.Tcline=np.float(hist.variables['Tcline'][:]) - + def print_ROMS_gridinfo(gridid): """ @@ -175,20 +175,20 @@ def print_ROMS_gridinfo(gridid): gridinfo = ROMS_gridinfo(gridid) - print ' ' - print 'grid information for gridid ', gridinfo.id, ':' - print ' ' - print 'grid name : ', gridinfo.name - print 'grid file path : ', gridinfo.grdfile - print 'number of vertical level : ', gridinfo.N - print 'grid type : ', gridinfo.grdtype + print(' ') + print('grid information for gridid ', gridinfo.id, ':') + print(' ') + print('grid name : ', gridinfo.name) + print('grid file path : ', gridinfo.grdfile) + print('number of vertical level : ', gridinfo.N) + print('grid type : ', gridinfo.grdtype) if gridinfo.grdtype == 'roms': - print 'theta_s = ', gridinfo.theta_s - print 'theta_b = ', gridinfo.theta_b - print 'Tcline = ', gridinfo.Tcline + print('theta_s = ', gridinfo.theta_s) + print('theta_b = ', gridinfo.theta_b) + print('Tcline = ', gridinfo.Tcline) #print 'hc = ', gridinfo.hc elif gridinfo.grdtype == 'z': - print 'depth = ', gridinfo.depth + print('depth = ', gridinfo.depth) def list_ROMS_gridid(): @@ -202,14 +202,14 @@ def list_ROMS_gridid(): data = open(gridid_file,'r') lines = data.readlines() data.close() - + gridid_list = [] for line in lines: s = line.split() if s[0] == 'id': gridid_list.append(s[2]) - print 'List of defined gridid : ', gridid_list + print('List of defined gridid : ', gridid_list) def get_ROMS_hgrid(gridid): @@ -225,17 +225,23 @@ def get_ROMS_hgrid(gridid): nc = io.Dataset(grdfile) #Check for cartesian or geographical grid - spherical = nc.variables['spherical'][:] + spherical = nc.variables['spherical'][0] + + #if it is type byte, then convert to string + try: + spherical=spherical.decode('utf8') + except: + print('Assuming spherical is integer',spherical, type(spherical)) - #Get horizontal grid + #Get horizontal grid if ((spherical == 0) or (spherical == 'F')): #cartesian grid - print 'Load cartesian grid from file' - if 'x_vert' in nc.variables.keys() and 'y_vert' in nc.variables.keys(): + print('Load cartesian grid from file') + if 'x_vert' in list(nc.variables.keys()) and 'y_vert' in list(nc.variables.keys()): x_vert = nc.variables['x_vert'][:] y_vert = nc.variables['y_vert'][:] - elif 'x_rho' in nc.variables.keys() and 'y_rho' in nc.variables.keys() \ - and 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): + elif 'x_rho' in list(nc.variables.keys()) and 'y_rho' in list(nc.variables.keys()) \ + and 'pm' in list(nc.variables.keys()) 
and 'pn' in list(nc.variables.keys()): x_rho = nc.variables['x_rho'][:] y_rho = nc.variables['y_rho'][:] pm = nc.variables['pm'][:] @@ -245,13 +251,13 @@ def get_ROMS_hgrid(gridid): #compute verts from rho point, pm, pn, angle x_vert, y_vert = rho_to_vert(x_rho, y_rho, pm, pn, angle) else: - raise ValueError, 'NetCDF file must contain x_vert and y_vert \ - or x_rho, y_rho, pm, pn and angle for a cartesian grid' + raise ValueError('NetCDF file must contain x_vert and y_vert \ + or x_rho, y_rho, pm, pn and angle for a cartesian grid') - if 'x_rho' in nc.variables.keys() and 'y_rho' in nc.variables.keys() and \ - 'x_u' in nc.variables.keys() and 'y_u' in nc.variables.keys() and \ - 'x_v' in nc.variables.keys() and 'y_v' in nc.variables.keys() and \ - 'x_psi' in nc.variables.keys() and 'y_psi' in nc.variables.keys(): + if 'x_rho' in list(nc.variables.keys()) and 'y_rho' in list(nc.variables.keys()) and \ + 'x_u' in list(nc.variables.keys()) and 'y_u' in list(nc.variables.keys()) and \ + 'x_v' in list(nc.variables.keys()) and 'y_v' in list(nc.variables.keys()) and \ + 'x_psi' in list(nc.variables.keys()) and 'y_psi' in list(nc.variables.keys()): x_rho = nc.variables['x_rho'][:] y_rho = nc.variables['y_rho'][:] x_u = nc.variables['x_u'][:] @@ -270,7 +276,7 @@ def get_ROMS_hgrid(gridid): x_psi = None y_psi = None - if 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): + if 'pm' in list(nc.variables.keys()) and 'pn' in list(nc.variables.keys()): pm = nc.variables['pm'][:] dx = 1. / pm pn = nc.variables['pn'][:] @@ -279,14 +285,14 @@ def get_ROMS_hgrid(gridid): dx = None dy = None - if 'dndx' in nc.variables.keys() and 'dmde' in nc.variables.keys(): + if 'dndx' in list(nc.variables.keys()) and 'dmde' in list(nc.variables.keys()): dndx = nc.variables['dndx'][:] dmde = nc.variables['dmde'][:] else: dndx = None dmde = None - if 'angle' in nc.variables.keys(): + if 'angle' in list(nc.variables.keys()): angle = nc.variables['angle'][:] else: angle = None @@ -297,15 +303,21 @@ def get_ROMS_hgrid(gridid): x_psi=x_psi, y_psi=y_psi, dx=dx, dy=dy, \ dndx=dndx, dmde=dmde, angle_rho=angle) + #load the mask + try: + hgrd.mask_rho = np.array(nc.variables['mask_rho'][:]) + except: + hgrd.mask_rho = np.ones(hgrd.x_rho.shape) + else: #geographical grid - print 'Load geographical grid from file' + print('Load geographical grid from file') proj = Basemap(projection='merc', resolution=None, lat_0=0, lon_0=0) - if 'lon_vert' in nc.variables.keys() and 'lat_vert' in nc.variables.keys(): + if 'lon_vert' in list(nc.variables.keys()) and 'lat_vert' in list(nc.variables.keys()): lon_vert = nc.variables['lon_vert'][:] lat_vert = nc.variables['lat_vert'][:] - elif 'lon_rho' in nc.variables.keys() and 'lat_rho' in nc.variables.keys() \ - and 'lon_psi' in nc.variables.keys() and 'lat_psi' in nc.variables.keys(): + elif 'lon_rho' in list(nc.variables.keys()) and 'lat_rho' in list(nc.variables.keys()) \ + and 'lon_psi' in list(nc.variables.keys()) and 'lat_psi' in list(nc.variables.keys()): lon_rho = nc.variables['lon_rho'][:] lat_rho = nc.variables['lat_rho'][:] lon_psi = nc.variables['lon_psi'][:] @@ -313,13 +325,13 @@ def get_ROMS_hgrid(gridid): #compute verts from rho and psi point lon_vert, lat_vert = rho_to_vert_geo(lon_rho, lat_rho, lon_psi, lat_psi) else: - raise ValueError, 'NetCDF file must contain lon_vert and lat_vert \ - or lon_rho, lat_rho, lon_psi, lat_psi for a geographical grid' + raise ValueError('NetCDF file must contain lon_vert and lat_vert \ + or lon_rho, lat_rho, lon_psi, lat_psi for a 
geographical grid') - if 'lon_rho' in nc.variables.keys() and 'lat_rho' in nc.variables.keys() and \ - 'lon_u' in nc.variables.keys() and 'lat_u' in nc.variables.keys() and \ - 'lon_v' in nc.variables.keys() and 'lat_v' in nc.variables.keys() and \ - 'lon_psi' in nc.variables.keys() and 'lat_psi' in nc.variables.keys(): + if 'lon_rho' in list(nc.variables.keys()) and 'lat_rho' in list(nc.variables.keys()) and \ + 'lon_u' in list(nc.variables.keys()) and 'lat_u' in list(nc.variables.keys()) and \ + 'lon_v' in list(nc.variables.keys()) and 'lat_v' in list(nc.variables.keys()) and \ + 'lon_psi' in list(nc.variables.keys()) and 'lat_psi' in list(nc.variables.keys()): lon_rho = nc.variables['lon_rho'][:] lat_rho = nc.variables['lat_rho'][:] lon_u = nc.variables['lon_u'][:] @@ -338,7 +350,7 @@ def get_ROMS_hgrid(gridid): lon_psi = None lat_psi = None - if 'pm' in nc.variables.keys() and 'pn' in nc.variables.keys(): + if 'pm' in list(nc.variables.keys()) and 'pn' in list(nc.variables.keys()): pm = nc.variables['pm'][:] dx = 1. / pm pn = nc.variables['pn'][:] @@ -347,30 +359,30 @@ def get_ROMS_hgrid(gridid): dx = None dy = None - if 'dndx' in nc.variables.keys() and 'dmde' in nc.variables.keys(): + if 'dndx' in list(nc.variables.keys()) and 'dmde' in list(nc.variables.keys()): dndx = nc.variables['dndx'][:] dmde = nc.variables['dmde'][:] else: dndx = None dmde = None - if 'angle' in nc.variables.keys(): + if 'angle' in list(nc.variables.keys()): angle = nc.variables['angle'][:] else: angle = None - #Get geographical grid + #Get geographical grid hgrd = CGrid_geo(lon_vert, lat_vert, proj, \ lon_rho=lon_rho, lat_rho=lat_rho, \ lon_u=lon_u, lat_u=lat_u, lon_v=lon_v, lat_v=lat_v, \ lon_psi=lon_psi, lat_psi=lat_psi, dx=dx, dy=dy, \ dndx=dndx, dmde=dmde, angle_rho=angle) - #load the mask - try: - hgrd.mask_rho = np.array(nc.variables['mask_rho'][:]) - except: - hgrd.mask_rho = np.ones(hgrd.lat_rho.shape) + #load the mask + try: + hgrd.mask_rho = np.array(nc.variables['mask_rho'][:]) + except: + hgrd.mask_rho = np.ones(hgrd.lat_rho.shape) return hgrd @@ -382,11 +394,11 @@ def get_ROMS_vgrid(gridid, zeta=None): Load ROMS vertical grid object. vgrid is a s_coordinate or a z_coordinate object, depending on gridid.grdtype. vgrid.z_r and vgrid.z_w (vgrid.z for a z_coordinate object) - can be indexed in order to retreive the actual depths. The - free surface time serie zeta can be provided as an optional - argument. Note that the values of zeta are not calculated - until z is indexed, so a netCDF variable for zeta may be passed, - even if the file is large, as only the values that are required + can be indexed in order to retreive the actual depths. The + free surface time serie zeta can be provided as an optional + argument. Note that the values of zeta are not calculated + until z is indexed, so a netCDF variable for zeta may be passed, + even if the file is large, as only the values that are required will be retrieved from the file. 
""" @@ -399,7 +411,7 @@ def get_ROMS_vgrid(gridid, zeta=None): try: h = nc.variables['h'][:] except: - raise ValueError, 'NetCDF file must contain the bathymetry h' + raise ValueError('NetCDF file must contain the bathymetry h') try: hraw = nc.variables['hraw'][:] @@ -418,8 +430,10 @@ def get_ROMS_vgrid(gridid, zeta=None): vgrid = s_coordinate_2(h, theta_b, theta_s, Tcline, N, hraw=hraw, zeta=zeta) elif Vtrans == 4: vgrid = s_coordinate_4(h, theta_b, theta_s, Tcline, N, hraw=hraw, zeta=zeta) + elif Vtrans == 5: + vgrid = s_coordinate_5(h, theta_b, theta_s, Tcline, N, hraw=hraw, zeta=zeta) else: - raise Warning, 'Unknow vertical transformation Vtrans' + raise Warning('Unknown vertical transformation Vtrans') elif gridinfo.grdtype == 'z': N = gridinfo.N @@ -427,7 +441,7 @@ def get_ROMS_vgrid(gridid, zeta=None): vgrid = z_coordinate(h, depth, N) else: - raise ValueError, 'Unknow grid type' + raise ValueError('Unknown grid type') return vgrid @@ -448,15 +462,15 @@ def get_ROMS_grid(gridid, zeta=None, hist_file=None,grid_file=None): grid information will be extracted from those files, and gridid will be used to name that grid for the rest of the python session. - + grd.vgrid is a s_coordinate or a z_coordinate object, depending on gridid.grdtype. - grd.vgrid.z_r and grd.vgrid.z_w (grd.vgrid.z for a - z_coordinate object) can be indexed in order to retreive the - actual depths. The free surface time serie zeta can be provided - as an optional argument. Note that the values of zeta are not - calculated until z is indexed, so a netCDF variable for zeta may - be passed, even if the file is large, as only the values that + grd.vgrid.z_r and grd.vgrid.z_w (grd.vgrid.z for a + z_coordinate object) can be indexed in order to retreive the + actual depths. The free surface time serie zeta can be provided + as an optional argument. Note that the values of zeta are not + calculated until z is indexed, so a netCDF variable for zeta may + be passed, even if the file is large, as only the values that are required will be retrieved from the file. """ @@ -486,7 +500,7 @@ def write_ROMS_grid(grd, filename='roms_grd.nc'): Mm, Lm = grd.hgrid.x_rho.shape - + # Write ROMS grid to file nc = netCDF.Dataset(filename, 'w', format='NETCDF3_64BIT') nc.Description = 'ROMS grid' @@ -498,7 +512,7 @@ def write_ROMS_grid(grd, filename='roms_grd.nc'): nc.createDimension('xi_u', Lm-1) nc.createDimension('xi_v', Lm) nc.createDimension('xi_psi', Lm-1) - + nc.createDimension('eta_rho', Mm) nc.createDimension('eta_u', Mm) nc.createDimension('eta_v', Mm-1) @@ -521,7 +535,7 @@ def write_nc_var(var, name, dimensions, long_name=None, units=None): if units is not None: nc.variables[name].units = units nc.variables[name][:] = var - print ' ... wrote ', name + print(' ... wrote ', name) if hasattr(grd.vgrid, 's_rho') is True and grd.vgrid.s_rho is not None: write_nc_var(grd.vgrid.theta_s, 'theta_s', (), 'S-coordinate surface control parameter') @@ -575,7 +589,7 @@ def write_nc_var(var, name, dimensions, long_name=None, units=None): nc.createVariable('spherical', 'c') nc.variables['spherical'].long_name = 'Grid type logical switch' nc.variables['spherical'][:] = grd.hgrid.spherical - print ' ... wrote ', 'spherical' + print(' ... 
wrote ', 'spherical') write_nc_var(grd.hgrid.angle_rho, 'angle', ('eta_rho', 'xi_rho'), 'angle between XI-axis and EAST', 'radians') diff --git a/pyroms/pyroms/hgrid.py b/pyroms/pyroms/hgrid.py index 9ae98ee..94432d6 100644 --- a/pyroms/pyroms/hgrid.py +++ b/pyroms/pyroms/hgrid.py @@ -5,7 +5,7 @@ import os import sys import ctypes -import cPickle +import pickle from warnings import warn from copy import deepcopy @@ -84,7 +84,7 @@ class BoundaryInteractor(object): def _update_beta_lines(self): """Update m/pline by finding the points where self.beta== -/+ 1""" - x, y = zip(*self._poly.xy) + x, y = list(zip(*self._poly.xy)) num_points = len(x)-1 # the first and last point are repeated xp = [x[n] for n in range(num_points) if self.beta[n]==1] @@ -132,7 +132,7 @@ def _poly_changed(self, poly): def _get_ind_under_point(self, event): 'get the index of the vertex under point if within epsilon tolerance' try: - x, y = zip(*self._poly.xy) + x, y = list(zip(*self._poly.xy)) # display coords xt, yt = self._poly.get_transform().numerix_x_y(x, y) @@ -188,7 +188,7 @@ def _key_press_callback(self, event): if ind is not None: self._poly.xy = [tup for i,tup in enumerate(self._poly.xy) \ if i!=ind] - self._line.set_data(zip(*self._poly.xy)) + self._line.set_data(list(zip(*self._poly.xy))) self.beta = [beta for i,beta in enumerate(self.beta) \ if i!=ind] elif event.key=='p': @@ -219,7 +219,7 @@ def _key_press_callback(self, event): list(self._poly.xy[:i+1]) + [(event.xdata, event.ydata)] + list(self._poly.xy[i+1:])) - self._line.set_data(zip(*self._poly.xy)) + self._line.set_data(list(zip(*self._poly.xy))) self.beta.insert(i+1, 0) break s0 = xys[-1] @@ -229,7 +229,7 @@ def _key_press_callback(self, event): self._poly.xy = np.array( list(self._poly.xy) + [(event.xdata, event.ydata)]) - self._line.set_data(zip(*self._poly.xy)) + self._line.set_data(list(zip(*self._poly.xy))) self.beta.append(0) elif event.key=='G' or event.key == '1': options = deepcopy(self.gridgen_options) @@ -276,7 +276,7 @@ def _motion_notify_callback(self, event): if self._ind == 0: self._poly.xy[-1] = x, y - x, y = zip(*self._poly.xy) + x, y = list(zip(*self._poly.xy)) self._line.set_data(x[:-1], y[:-1]) self._update_beta_lines() @@ -311,7 +311,7 @@ def __init__(self, x, y=None, beta=None, ax=None, proj=None, # Set default gridgen option, and copy over specified options. 
self.gridgen_options = {'ul_idx': 0, 'shp': (32, 32)} - for key, value in gridgen_options.iteritems(): + for key, value in gridgen_options.items(): self.gridgen_options[key] = gridgen_options[key] x = list(x); y = list(y) @@ -364,7 +364,7 @@ def __init__(self, x, y=None, beta=None, ax=None, proj=None, def save_bry(self, bry_file='bry.pickle'): f = open(bry_file, 'wb') bry_dict = {'x': self.x, 'y': self.y, 'beta': self.beta} - cPickle.dump(bry_dict, f, protocol=-1) + pickle.dump(bry_dict, f, protocol=-1) f.close() def load_bry(self, bry_file='bry.pickle'): @@ -374,17 +374,17 @@ def load_bry(self, bry_file='bry.pickle'): self._line.set_data(x, y) self.beta = bry_dict['beta'] if hasattr(self, '_poly'): - self._poly.xy = zip(x, y) + self._poly.xy = list(zip(x, y)) self._update_beta_lines() self._draw_callback(None) self._canvas.draw() def save_grid(self, grid_file='grid.pickle'): f = open(grid_file, 'wb') - cPickle.dump(self.grd, f, protocol=-1) + pickle.dump(self.grd, f, protocol=-1) f.close() - def _get_verts(self): return zip(self.x, self.y) + def _get_verts(self): return list(zip(self.x, self.y)) verts = property(_get_verts) def get_xdata(self): return self._line.get_xdata() x = property(get_xdata) @@ -934,9 +934,9 @@ def __init__(self, lon_vert, lat_vert, proj, use_gcdist=True, ellipse='WGS84', \ def mask_polygon_geo(lonlat_verts, mask_value=0.0): - lon, lat = zip(*lonlat_verts) + lon, lat = list(zip(*lonlat_verts)) x, y = proj(lon, lat, inverse=True) - self.mask_polygon(zip(x, y), mask_value) + self.mask_polygon(list(zip(x, y)), mask_value) lon = property(lambda self: self.lon_vert, None, None, 'Shorthand for lon_vert') lat = property(lambda self: self.lat_vert, None, None, 'Shorthand for lat_vert') @@ -1376,25 +1376,25 @@ def __init__(self, grd, coast=None, **kwargs): if type(grd).__name__ == 'ROMS_Grid': try: - x = range(grd.hgrid.lon_vert.shape[1]) - y = range(grd.hgrid.lat_vert.shape[0]) + x = list(range(grd.hgrid.lon_vert.shape[1])) + y = list(range(grd.hgrid.lat_vert.shape[0])) xv, yv = np.meshgrid(x,y) mask = grd.hgrid.mask_rho except: - x = range(grd.hgrid.x_vert.shape[1]) - y = range(grd.hgrid.y_vert.shape[0]) + x = list(range(grd.hgrid.x_vert.shape[1])) + y = list(range(grd.hgrid.y_vert.shape[0])) xv, yv = np.meshgrid(x,y) mask = grd.hgrid.mask_rho if type(grd).__name__ == 'CGrid_geo': try: - x = range(grd.lon_vert.shape[1]) - y = range(grd.lat_vert.shape[0]) + x = list(range(grd.lon_vert.shape[1])) + y = list(range(grd.lat_vert.shape[0])) xv, yv = np.meshgrid(x,y) mask = grd.mask_rho except: - x = range(grd.x_vert.shape[1]) - y = range(grd.y_vert.shape[0]) + x = list(range(grd.x_vert.shape[1])) + y = list(range(grd.y_vert.shape[0])) xv, yv = np.meshgrid(x,y) mask = grd.mask_rho @@ -1498,12 +1498,12 @@ def _on_click(self, event): else: idx = np.argwhere(d.flatten() == d.min()) j, i = np.argwhere(d == d.min())[0] - print 'Position on the grid (rho point): i =', i, ', j =', j + print('Position on the grid (rho point): i =', i, ', j =', j) if self.proj is not None: lon, lat = self.proj(self._xc[j,i], self._yc[j,i], inverse=True) - print 'corresponding geographical position : lon = ', lon, ', lat =', lat + print('corresponding geographical position : lon = ', lon, ', lat =', lat) else: - print 'corresponding cartesian position : x = ', self._xc[j,i], ', y =', self._yc[j,i] + print('corresponding cartesian position : x = ', self._xc[j,i], ', y =', self._yc[j,i]) def __init__(self, grd, proj=None, **kwargs): diff --git a/pyroms/pyroms/io.py b/pyroms/pyroms/io.py index 
b796a21..2254159 100644 --- a/pyroms/pyroms/io.py +++ b/pyroms/pyroms/io.py @@ -14,22 +14,22 @@ with an input of a string: # returns netCDF4.Dataset object based on file - nc = pyroms.io.Dataset(file) - + nc = pyroms.io.Dataset(file) + # returns MFnetCDF4.Dataset object based on file (with wildcard chars) - nc = pyroms.io.MFDataset(file) + nc = pyroms.io.MFDataset(file) with an input of a list of files: # returns MFDataset object based on list of files - nc = pyroms.io.Dataset(files) - + nc = pyroms.io.Dataset(files) + # returns MFDataset object based on list of files nc = pyroms.io.MFDataset(files) with an input of a netCDF4.Dataset or MFnetCDF4.Dataset object: # passes through netCDF4.Dataset or MFnetCDF4.Dataset object nc = pyroms.io.Dataset(nc) - + # passes through MFDataset object based on file (with wildcard chars) nc = pyroms.io.MFDataset(nc) ''' @@ -42,12 +42,12 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF - + def Dataset(ncfile): """Return an appropriate netcdf object: netCDF4 object given a file string MFnetCDF4 object given a list of files - + A netCDF4 or MFnetCDF4 object returns itself.""" if isinstance(ncfile, str): return netCDF.Dataset(ncfile, 'r') @@ -58,8 +58,8 @@ def Dataset(ncfile): 'variables attribute must be a dictionary' return ncfile else: - raise TypeError, 'type %s not supported' % type(ncfile) - + raise TypeError('type %s not supported' % type(ncfile)) + Dataset.__doc__ = __doc__ def MFDataset(ncfile): @@ -75,29 +75,29 @@ def MFDataset(ncfile): 'variables attribute must be a dictionary' return ncfile else: - raise TypeError, 'type %s not supported' % type(ncfile) + raise TypeError('type %s not supported' % type(ncfile)) return MFnetCDF4.Dataset(files) - + MFDataset.__doc__ = __doc__ except: import pyroms.extern.pupynere import warnings - + warnings.warn('netCDF[3/4] not found -- using pupynere.') - + def Dataset(ncfile): if isinstance(ncfile, str): return pupynere.NetCDFFile(ncfile) elif isinstance(ncfile, pupynere.NetCDFFile): return ncfile else: - raise TypeError, 'type %s not supported' % type(ncfile) - + raise TypeError('type %s not supported' % type(ncfile)) + Dataset.__doc__ = __doc__ if __name__ == '__main__': pass - + diff --git a/pyroms/pyroms/remapping/__init__.py b/pyroms/pyroms/remapping/__init__.py index 80077f9..1836723 100644 --- a/pyroms/pyroms/remapping/__init__.py +++ b/pyroms/pyroms/remapping/__init__.py @@ -3,18 +3,18 @@ A set of tools for remapping ''' -from make_remap_grid_file import make_remap_grid_file -from compute_remap_weights import compute_remap_weights -from test_remap_weights import test_remap_weights -from remap import remap -from remap2 import remap2 +from .make_remap_grid_file import make_remap_grid_file +from .compute_remap_weights import compute_remap_weights +from .test_remap_weights import test_remap_weights +from .remap import remap +from .remap2 import remap2 try: import scrip except: - print 'scrip.so not found. Remapping function will not be available' -from roms2z import roms2z -from sta2z import sta2z -from z2roms import z2roms -from flood import flood -from flood2d import flood2d + print('scrip.so not found. 
Remapping function will not be available') +from .roms2z import roms2z +from .sta2z import sta2z +from .z2roms import z2roms +from .flood import flood +from .flood2d import flood2d diff --git a/pyroms/pyroms/remapping/flood.py b/pyroms/pyroms/remapping/flood.py index 4d9e066..1690620 100644 --- a/pyroms/pyroms/remapping/flood.py +++ b/pyroms/pyroms/remapping/flood.py @@ -11,12 +11,12 @@ def flood(varz, grdz, Cpos='rho', irange=None, jrange=None, \ var = flood(var, grdz) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e37 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -59,7 +59,7 @@ def flood(varz, grdz, Cpos='rho', irange=None, jrange=None, \ h = grdz.vgrid.h mask = grdz.hgrid.mask_rho else: - raise Warning, '%s bad position. Use depth at Arakawa-C rho points instead.' % Cpos + raise Warning('%s bad position. Use depth at Arakawa-C rho points instead.' % Cpos) nlev, Mm, Lm = varz.shape @@ -118,7 +118,7 @@ def flood(varz, grdz, Cpos='rho', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[:bottom[j,i],j,i] = varz[bottom[j,i],j,i] - varz[surface[j,i]:,j,i] = varz[surface[j,i],j,i] + varz[:int(bottom[j,i]),j,i] = varz[int(bottom[j,i]),j,i] + varz[int(surface[j,i]):,j,i] = varz[int(surface[j,i]),j,i] return varz diff --git a/pyroms/pyroms/remapping/flood2d.py b/pyroms/pyroms/remapping/flood2d.py index a2b5671..80d2a72 100644 --- a/pyroms/pyroms/remapping/flood2d.py +++ b/pyroms/pyroms/remapping/flood2d.py @@ -3,20 +3,18 @@ import numpy as np import _remapping -import pyroms - def flood2d(varz, grdz, Cpos='rho', irange=None, jrange=None, \ spval=1e37, dmax=0, cdepth=0, kk=0): """ var = flood(var, grdz) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e37 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance Flood varz on gridz """ @@ -51,7 +49,7 @@ def flood2d(varz, grdz, Cpos='rho', irange=None, jrange=None, \ z = grdz.vgrid.z[:] mask = grdz.hgrid.mask_rho else: - raise Warning, '%s bad position. Use depth at Arakawa-C rho points instead.' % Cpos + raise Warning('%s bad position. Use depth at Arakawa-C rho points instead.' 
% Cpos) Mm, Lm = varz.shape @@ -82,7 +80,7 @@ def flood2d(varz, grdz, Cpos='rho', irange=None, jrange=None, \ idxnan = np.where(c2 == True) idx = np.where(c2 == False) if list(idx[0]): -# print "inside test", len(idx[0]), len(idxnan[0]) +# print "inside test", len(idx[0]), len(idxnan[0]) wet = np.zeros((len(idx[0]),2)) dry = np.zeros((len(idxnan[0]),2)) wet[:,0] = idx[0]+1 diff --git a/pyroms/pyroms/remapping/make_remap_grid_file.py b/pyroms/pyroms/remapping/make_remap_grid_file.py index 76ccf19..001dfe6 100644 --- a/pyroms/pyroms/remapping/make_remap_grid_file.py +++ b/pyroms/pyroms/remapping/make_remap_grid_file.py @@ -45,17 +45,17 @@ def make_remap_grid_file(grid, Cpos='rho', irange=None, jrange=None): if Cpos == 'rho': if jrange != (0,Mp-1) or irange != (0,Lp-1): lon_corner = grd.hgrid.lon_vert[jrange[0]:jrange[1]+1, \ - irange[0]:irange[1]+1] + irange[0]:irange[1]+1] lat_corner = grd.hgrid.lat_vert[jrange[0]:jrange[1]+1, \ - irange[0]:irange[1]+1] + irange[0]:irange[1]+1] grid_center_lon = grd.hgrid.lon_rho[jrange[0]:jrange[1], \ - irange[0]:irange[1]].flatten() + irange[0]:irange[1]].flatten() grid_center_lat = grd.hgrid.lat_rho[jrange[0]:jrange[1], \ - irange[0]:irange[1]].flatten() + irange[0]:irange[1]].flatten() grid_imask = grd.hgrid.mask_rho[jrange[0]:jrange[1], \ - irange[0]:irange[1]].flatten() - Lp = irange[1] - irange[0] - Mp = jrange[1] - jrange[0] + irange[0]:irange[1]].flatten() + Lp = irange[1] - irange[0] + Mp = jrange[1] - jrange[0] else: lon_corner = grd.hgrid.lon_vert lat_corner = grd.hgrid.lat_vert @@ -66,20 +66,20 @@ def make_remap_grid_file(grid, Cpos='rho', irange=None, jrange=None): elif Cpos == 'u': if jrange != (0,Mp-1) or irange != (0,Lp-1): lon_corner = 0.5 * \ - (grd.hgrid.lon_vert[jrange[0]:jrange[1]+1,irange[0]:irange[1]] + \ + (grd.hgrid.lon_vert[jrange[0]:jrange[1]+1,irange[0]:irange[1]] + \ grd.hgrid.lon_vert[jrange[0]:jrange[1]+1,1+irange[0]:irange[1]+1]) lat_corner = 0.5 * \ - (grd.hgrid.lat_vert[jrange[0]:jrange[1]+1,irange[0]:irange[1]] + \ + (grd.hgrid.lat_vert[jrange[0]:jrange[1]+1,irange[0]:irange[1]] + \ grd.hgrid.lat_vert[jrange[0]:jrange[1]+1,1+irange[0]:irange[1]+1]) grid_center_lon = grd.hgrid.lon_u[jrange[0]:jrange[1], \ - irange[0]:irange[1]-1].flatten() + irange[0]:irange[1]-1].flatten() grid_center_lat = grd.hgrid.lat_u[jrange[0]:jrange[1], \ - irange[0]:irange[1]-1].flatten() + irange[0]:irange[1]-1].flatten() grid_imask = grd.hgrid.mask_u[jrange[0]:jrange[1], \ - irange[0]:irange[1]-1].flatten() - Lp = irange[1] - irange[0] - 1 - Mp = jrange[1] - jrange[0] - else: + irange[0]:irange[1]-1].flatten() + Lp = irange[1] - irange[0] - 1 + Mp = jrange[1] - jrange[0] + else: lon_corner = 0.5 * (grd.hgrid.lon_vert[:,:-1] + \ grd.hgrid.lon_vert[:,1:]) lat_corner = 0.5 * (grd.hgrid.lat_vert[:,:-1] + \ @@ -91,20 +91,20 @@ def make_remap_grid_file(grid, Cpos='rho', irange=None, jrange=None): elif Cpos == 'v': if jrange != (0,Mp-1) or irange != (0,Lp-1): lon_corner = 0.5 * \ - (grd.hgrid.lon_vert[jrange[0]:jrange[1],irange[0]:irange[1]+1] + \ + (grd.hgrid.lon_vert[jrange[0]:jrange[1],irange[0]:irange[1]+1] + \ grd.hgrid.lon_vert[1+jrange[0]:jrange[1]+1,irange[0]:irange[1]+1]) lat_corner = 0.5 * \ - (grd.hgrid.lat_vert[jrange[0]:jrange[1],irange[0]:irange[1]+1] + \ + (grd.hgrid.lat_vert[jrange[0]:jrange[1],irange[0]:irange[1]+1] + \ grd.hgrid.lat_vert[1+jrange[0]:jrange[1]+1,irange[0]:irange[1]+1]) grid_center_lon = grd.hgrid.lon_v[jrange[0]:jrange[1]-1, \ - irange[0]:irange[1]].flatten() + irange[0]:irange[1]].flatten() grid_center_lat = 
grd.hgrid.lat_v[jrange[0]:jrange[1]-1, \ - irange[0]:irange[1]].flatten() + irange[0]:irange[1]].flatten() grid_imask = grd.hgrid.mask_v[jrange[0]:jrange[1]-1, \ - irange[0]:irange[1]].flatten() - Lp = irange[1] - irange[0] - Mp = jrange[1] - jrange[0] - 1 - else: + irange[0]:irange[1]].flatten() + Lp = irange[1] - irange[0] + Mp = jrange[1] - jrange[0] - 1 + else: lon_corner = 0.5 * (grd.hgrid.lon_vert[:-1,:] + \ grd.hgrid.lon_vert[1:,:]) lat_corner = 0.5 * (grd.hgrid.lat_vert[:-1,:] + \ @@ -114,10 +114,10 @@ def make_remap_grid_file(grid, Cpos='rho', irange=None, jrange=None): grid_imask = grd.hgrid.mask_v.flatten() Mp, Lp = grd.hgrid.mask_v.shape else: - raise ValueError, 'Cpos must be rho, u or v' + raise ValueError('Cpos must be rho, u or v') grid_size = Lp * Mp - print 'grid shape', Mp, Lp + print('grid shape', Mp, Lp) grid_corner_lon = np.zeros((grid_size, 4)) grid_corner_lat = np.zeros((grid_size, 4)) diff --git a/pyroms/pyroms/remapping/remap.py b/pyroms/pyroms/remapping/remap.py index 734b5c8..a936055 100644 --- a/pyroms/pyroms/remapping/remap.py +++ b/pyroms/pyroms/remapping/remap.py @@ -40,14 +40,14 @@ def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \ iorder = 1 if verbose is True: - print 'Reading remapping: ', title - print 'From file: ', remap_file - print ' ' - print 'Remapping between:' - print src_grid_name - print 'and' - print dst_grid_name - print 'Remapping method: ', map_method + print('Reading remapping: ', title) + print('From file: ', remap_file) + print(' ') + print('Remapping between:') + print(src_grid_name) + print('and') + print(dst_grid_name) + print('Remapping method: ', map_method) ndim = len(src_array.squeeze().shape) @@ -85,7 +85,7 @@ def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \ tmp_src_grad1, tmp_src_grad2, \ tmp_src_grad3) else: - raise ValueError, 'Unknown method' + raise ValueError('Unknown method') # mask dst_array idx = np.where(dst_mask == 0) @@ -133,7 +133,7 @@ def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \ tmp_src_grad1, tmp_src_grad2, \ tmp_src_grad3) else: - raise ValueError, 'Unknown method' + raise ValueError('Unknown method') # mask dst_array @@ -146,7 +146,7 @@ def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \ dst_grid_dims[0])) else: - raise ValueError, 'src_array must have two or three dimensions' + raise ValueError('src_array must have two or three dimensions') # close data file diff --git a/pyroms/pyroms/remapping/remap2.py b/pyroms/pyroms/remapping/remap2.py index 38a08e1..953da18 100644 --- a/pyroms/pyroms/remapping/remap2.py +++ b/pyroms/pyroms/remapping/remap2.py @@ -40,17 +40,17 @@ def remap2(src_array, remap_file, src_grad1=None, src_grad2=None, \ iorder = 1 if verbose is True: - print 'Reading remapping: ', title - print 'From file: ', remap_file - print ' ' - print 'Remapping between:' - print src_grid_name - print 'and' - print dst_grid_name - print 'Remapping method: ', map_method + print('Reading remapping: ', title) + print('From file: ', remap_file) + print(' ') + print('Remapping between:') + print(src_grid_name) + print('and') + print(dst_grid_name) + print('Remapping method: ', map_method) ndim = len(src_array.squeeze().shape) - + if (ndim == 2): tmp_dst_array = np.zeros((dst_grid_size)) tmp_src_array = src_array.flatten() @@ -85,7 +85,7 @@ def remap2(src_array, remap_file, src_grad1=None, src_grad2=None, \ tmp_src_grad1, tmp_src_grad2, \ tmp_src_grad3) else: - raise ValueError, 'Unknown method' + raise ValueError('Unknown method') # mask 
dst_array idx = np.where(dst_mask == 0) @@ -95,7 +95,7 @@ def remap2(src_array, remap_file, src_grad1=None, src_grad2=None, \ # reshape # dst_array = np.reshape(tmp_dst_array, (dst_grid_dims[1], \ # dst_grid_dims[0])) - dst_array = tmp_dst_array + dst_array = tmp_dst_array elif (ndim == 3): @@ -134,7 +134,7 @@ def remap2(src_array, remap_file, src_grad1=None, src_grad2=None, \ tmp_src_grad1, tmp_src_grad2, \ tmp_src_grad3) else: - raise ValueError, 'Unknow method' + raise ValueError('Unknown method') # mask dst_array @@ -147,7 +147,7 @@ def remap2(src_array, remap_file, src_grad1=None, src_grad2=None, \ dst_grid_dims[0])) else: - raise ValueError, 'src_array must have two or three dimensions' + raise ValueError('src_array must have two or three dimensions') # close data file diff --git a/pyroms/pyroms/remapping/roms2z.py b/pyroms/pyroms/remapping/roms2z.py index a911f87..da6d38d 100644 --- a/pyroms/pyroms/remapping/roms2z.py +++ b/pyroms/pyroms/remapping/roms2z.py @@ -9,8 +9,8 @@ def roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, \ varz = roms2z(var, grd, grdz) optional switch: - - Cpos='rho', 'u' 'v' or 'w' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' 'v' or 'w' specify the C-grid position where + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e37 define spval value @@ -29,7 +29,7 @@ def roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, \ imode=1 else: imode=0 - raise Warning, '%s not supported, defaulting to linear' % mode + raise Warning('%s not supported, defaulting to linear' % mode) if Cpos is 'rho': @@ -49,7 +49,7 @@ def roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, \ depth = grdz.vgrid.z mask = grd.hgrid.mask_rho else: - raise Warning, '%s unknown position. Cpos must be rho, u, v or w.' % Cpos + raise Warning('%s unknown position. Cpos must be rho, u, v or w.' % Cpos) Nm, Mm, Lm = var.shape nlev = grdz.vgrid.N @@ -82,5 +82,5 @@ def roms2z(var, grd, grdz, Cpos='rho', irange=None, jrange=None, \ idx = np.where(abs((varz-spval)/spval)<=1e-5) varz[idx] = spval #varz = np.ma.masked_values(varz, spval, rtol=1e-5) - + return varz diff --git a/pyroms/pyroms/remapping/sta2z.py b/pyroms/pyroms/remapping/sta2z.py index 451f8d4..3a94184 100644 --- a/pyroms/pyroms/remapping/sta2z.py +++ b/pyroms/pyroms/remapping/sta2z.py @@ -28,7 +28,7 @@ def sta2z(var, grd, grdz, Cpos='rho', srange=None, \ imode=1 else: imode=0 - raise Warning, '%s not supported, defaulting to linear' % mode + raise Warning('%s not supported, defaulting to linear' % mode) if Cpos is 'rho': z = grd.vgrid.z_r[0,:] @@ -37,7 +37,7 @@ def sta2z(var, grd, grdz, Cpos='rho', srange=None, \ z = grd.vgrid.z_w[0,:] depth = grdz.vgrid.z else: - raise Warning, '%s unknown position. Cpos must be rho or w.' % Cpos + raise Warning('%s unknown position. Cpos must be rho or w.' 
% Cpos) var = var.T Nm, Sm = var.shape diff --git a/pyroms/pyroms/remapping/z2roms.py b/pyroms/pyroms/remapping/z2roms.py index 2a72842..1a4f241 100644 --- a/pyroms/pyroms/remapping/z2roms.py +++ b/pyroms/pyroms/remapping/z2roms.py @@ -13,12 +13,12 @@ def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, \ var = z2roms(var, grdz, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e37 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -37,7 +37,7 @@ def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, \ elif mode=='spline': imode=1 else: - raise Warning, '%s not supported, defaulting to linear' % mode + raise Warning('%s not supported, defaulting to linear' % mode) if Cpos is 'rho': z = grdz.vgrid.z[:] @@ -56,8 +56,8 @@ def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, \ depth = grd.vgrid.z_w[0,:] mask = grd.hgrid.mask_rho else: - raise Warning, '%s bad position. Use depth at Arakawa-C \ - rho points instead.' % Cpos + raise Warning('%s bad position. Use depth at Arakawa-C \ + rho points instead.' % Cpos) nlev, Mm, Lm = varz.shape Nm = depth.shape[0] @@ -84,7 +84,7 @@ def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, \ z = np.concatenate((-9999*np.ones((1,z.shape[1], z.shape[2])), \ z, \ 100*np.ones((1,z.shape[1], z.shape[2]))), 0) - + var = np.ma.zeros((Nm, Mm, Lm)) for k in range(Nm): @@ -96,5 +96,5 @@ def z2roms(varz, grdz, grd, Cpos='rho', irange=None, jrange=None, \ #mask var = np.ma.masked_values(var, spval, rtol=1e-5) #var[k,:,:] = np.ma.masked_where(mask == 0, var[k,:,:]) - + return var diff --git a/pyroms/pyroms/sandbox/googleearth.py b/pyroms/pyroms/sandbox/googleearth.py index 0103176..dfaac56 100644 --- a/pyroms/pyroms/sandbox/googleearth.py +++ b/pyroms/pyroms/sandbox/googleearth.py @@ -15,6 +15,7 @@ import pylab import zipfile import octant +import pyroms import os kml_preamble = ''' @@ -68,15 +69,15 @@ def kmz_anim(lon, lat, time, prop, **kwargs): lon = asarray(lon) lat = asarray(lat) - + jd = pylab.date2num(time) - jd_edges = hstack((1.5*jd[0]-0.5*jd[1], - 0.5*(jd[1:]+jd[:-1]), + jd_edges = hstack((1.5*jd[0]-0.5*jd[1], + 0.5*(jd[1:]+jd[:-1]), 1.5*jd[-1]-0.5*jd[-2])) time_edges = pylab.num2date(jd_edges) time_starts = time_edges[:-1] time_stops = time_edges[1:] - + name = kwargs.pop('name', 'overlay') color = kwargs.pop('color', '9effffff') visibility = str( kwargs.pop('visibility', 1) ) @@ -87,29 +88,29 @@ def kmz_anim(lon, lat, time, prop, **kwargs): kwargs['vmax'] = vmax vmin = kwargs.pop('vmin', prop.min()) kwargs['vmin'] = vmin - + geo_aspect = cos(lat.mean()*pi/180.0) xsize = lon.ptp()*geo_aspect ysize = lat.ptp() - + aspect = ysize/xsize if aspect > 1.0: figsize = (10.0/aspect, 10.0) else: figsize = (10.0, 10.0*aspect) - + kml_text = kml_preamble - + ioff() fig = figure(figsize=figsize, dpi=pixels//10, facecolor=None, frameon=False) ax = fig.add_axes([0, 0, 1, 1]) - + f = zipfile.ZipFile(kmzfile, 'w') - + for frame in range(prop.shape[0]): tstart = time_starts[frame] tstop = time_stops[frame] - print 'Writing frame ', frame, tstart.isoformat(), tstop.isoformat() + print('Writing frame ', frame, tstart.isoformat(), tstop.isoformat()) ax.cla() pc = 
ax.pcolor(lon, lat, prop[frame], **kwargs) ax.set_xlim(lon.min(), lon.max()) @@ -127,10 +128,10 @@ def kmz_anim(lon, lat, time, prop, **kwargs): .replace('__FRAME__', icon)\ .replace('__TIMEBEGIN__', tstart.isoformat())\ .replace('__TIMEEND__', tstop.isoformat()) - + f.write(icon) os.remove(icon) - + # legend fig = figure(figsize=(1.0, 4.0), facecolor=None, frameon=False) cax = fig.add_axes([0.0, 0.05, 0.2, 0.90]) @@ -138,13 +139,13 @@ def kmz_anim(lon, lat, time, prop, **kwargs): cb.set_label(units, color='0.9') for lab in cb.ax.get_yticklabels(): setp(lab, 'color', '0.9') - + savefig('legend.png') f.write('legend.png') os.remove('legend.png') - + kml_text += kml_legend - + kml_text += kml_closing f.writestr('overlay.kml', kml_text) f.close() @@ -153,21 +154,21 @@ def kmz_anim(lon, lat, time, prop, **kwargs): if __name__ == '__main__': ncll = octant.io.Dataset('/Users/rob/Archive/GWB/bodden/latlon.nc') nc = octant.io.Dataset('/Users/rob/Archive/GWB/bodden/bsh_elev_2001-10.nc') - + lat = ncll.variables['lat'][:] lon = ncll.variables['lon'][:] - + lon, lat = meshgrid(lon, lat) - + time = octant.ocean_time(nc, name='time')[:200:4] - + propname = 'elev' - + prop = nc.variables[propname][:200:4] mask = prop == nc.variables[propname].missing_value prop = ma.masked_where(mask, prop) - - kmz_anim(lon, lat, time.dates, prop, kmzfile='bsh_anim.kmz', + + kmz_anim(lon, lat, time.dates, prop, kmzfile='bsh_anim.kmz', name='BSH model -- sea surface height', units='sea surface height [m]') @@ -207,7 +208,7 @@ def kmz_anim(lon, lat, time, prop, **kwargs): def geo_pcolor(lon, lat, prop, **kwargs): """docstring for geo_pcolor""" - + name = kwargs.pop('name', 'overlay') color = kwargs.pop('color', '9effffff') visibility = str( kwargs.pop('visibility', 1) ) @@ -218,17 +219,17 @@ def geo_pcolor(lon, lat, prop, **kwargs): kwargs['vmax'] = vmax vmin = kwargs.pop('vmin', prop.min()) kwargs['vmin'] = vmin - + geo_aspect = cos(lat.mean()*pi/180.0) xsize = lon.ptp()*geo_aspect ysize = lat.ptp() - + aspect = ysize/xsize if aspect > 1.0: figsize = (10.0/aspect, 10.0) else: figsize = (10.0, 10.0*aspect) - + ioff() fig = figure(figsize=figsize, facecolor=None, frameon=False, dpi=pixels//10) ax = fig.add_axes([0, 0, 1, 1]) @@ -237,7 +238,7 @@ def geo_pcolor(lon, lat, prop, **kwargs): ax.set_ylim(lat.min(), lat.max()) ax.set_axis_off() savefig('overlay.png') - + f = zipfile.ZipFile(kmzfile, 'w') f.writestr('overlay.kml', kml_groundoverlay.replace('__NAME__', name)\ .replace('__COLOR__', color)\ @@ -248,35 +249,35 @@ def geo_pcolor(lon, lat, prop, **kwargs): .replace('__WEST__', str(lon.min()))) f.write('overlay.png') os.remove('overlay.png') - + fig = figure(figsize=(1.0, 4.0), facecolor=None, frameon=False) ax = fig.add_axes([0.0, 0.05, 0.2, 0.9]) cb = colorbar(pc, cax=ax) cb.set_label(units, color='0.9') for lab in cb.ax.get_yticklabels(): setp(lab, 'color', '0.9') - + savefig('legend.png') f.write('legend.png') - os.remove('legend.png') + os.remove('legend.png') f.close() if __name__ == '__main__': ncll = pyroms.Dataset('/Users/rob/Archive/GWB/bodden/latlon.nc') nc = pyroms.Dataset('/Users/rob/Archive/GWB/bodden/bsh_elev_2001-10.nc') - + lat = ncll.variables['lat'][:] lon = ncll.variables['lon'][:] - + lon, lat = meshgrid(lon, lat) - + propname = 'elev' - + prop = nc.variables[propname][-1] mask = prop == nc.variables[propname].missing_value prop = ma.masked_where(mask, prop) - + geo_pcolor(lon, lat, prop, kmzfile='bsh.kmz', \ name='BSH model -- sea surface height',\ units='sea surface height [m]') diff --git 
a/pyroms/pyroms/sandbox/sea_level.py b/pyroms/pyroms/sandbox/sea_level.py index b346b19..e3c385b 100644 --- a/pyroms/pyroms/sandbox/sea_level.py +++ b/pyroms/pyroms/sandbox/sea_level.py @@ -2,7 +2,7 @@ # sample url #http://tidesandcurrents.noaa.gov/data_listing.shtml?bdate=20080323&edate=20080423&datum=6&unit=0&shift=g&stn=8762075&type=Tide%20Data&format=View+Data&listing=1 -import urllib2 +import urllib.request, urllib.error, urllib.parse from datetime import datetime import numpy as np @@ -28,8 +28,8 @@ def __init__(self, station_id, start_date, end_date=None): self.data_dict['stn'] = str(station_id) url = self.root + '&'.join([ '='.join(keyval) for (keyval) in - self.data_dict.iteritems()]) - lines = urllib2.urlopen(url).readlines() + self.data_dict.items()]) + lines = urllib.request.urlopen(url).readlines() pr = False date = [] @@ -54,6 +54,6 @@ def __init__(self, station_id, start_date, end_date=None): sl = sea_level('8762075', 'foo') -print sl.date -print sl.ssh +print(sl.date) +print(sl.ssh) \ No newline at end of file diff --git a/pyroms/pyroms/setup.py b/pyroms/pyroms/setup.py index aab68fc..d0ea53e 100644 --- a/pyroms/pyroms/setup.py +++ b/pyroms/pyroms/setup.py @@ -33,14 +33,14 @@ get_ROMS_hgrid get_ROMS_vgrid get_ROMS_grid - write_ROMS_grid + write_ROMS_grid io - wrapper for netCDF4 Dataset MFDataset cf - CF compliant files tools - time + time tools - Tools specific to the Regional Ocean Modeling System roms2z @@ -55,7 +55,7 @@ latslice section_transport - utility - Some basic tools + utility - Some basic tools get_lonlat get_ij roms_varlist @@ -95,14 +95,12 @@ version = '0.1.0', description = doclines[0], long_description = "\n".join(doclines[2:]), - author = "Frederic Castruccio", - author_email = "frederic@marine.rutgers.edu", - url = "ftp://marine.rutgers.edu/pub/frederic/pyroms/", + url = "https://github.com/ESMG/pyroms", packages = ['pyroms', 'pyroms.remapping', 'pyroms.extern'], license = 'BSD', platforms = ["any"], ext_modules = [iso,interp,remapping], - classifiers = filter(None, classifiers.split("\n")), + classifiers = [_f for _f in classifiers.split("\n") if _f], ) diff --git a/pyroms/pyroms/src/interp.f b/pyroms/pyroms/src/interp.f index b80277a..fbd7c79 100644 --- a/pyroms/pyroms/src/interp.f +++ b/pyroms/pyroms/src/interp.f @@ -3,7 +3,6 @@ ! SUBROUTINE xhslice (f2d,f3d,z,depth,mask,im,jm,km,vintrp,spval) ! -!svn $Id: xhslice.F 159 2008-03-05 18:02:32Z arango $ !================================================== Hernan G. Arango === ! Copyright (c) 2002-2008 The ROMS/TOMS Group ! ! Licensed under a MIT/X style license ! @@ -86,7 +85,9 @@ SUBROUTINE xhslice (f2d,f3d,z,depth,mask,im,jm,km,vintrp,spval) real*8 f3d(km,jm,im), z(km,jm,im) real*8 depth(jm,im), mask(jm,im) real*8 f2d(jm,im) -cf2py intent(out) f2d +!f2py intent(out) f2d +!f2py intent(hide) im +!f2py intent(hide) jm real*8 fk(NK), zk(NK), wk(NK) parameter (der1=c1ep30,derkm=c1ep30) ! @@ -140,7 +141,6 @@ SUBROUTINE xhslice (f2d,f3d,z,depth,mask,im,jm,km,vintrp,spval) ! SUBROUTINE lintrp (n,x,y,ni,xi,yi) ! -!svn $Id: lintrp.F 159 2008-03-05 18:02:32Z arango $ !================================================== Hernan G. Arango === ! Copyright (c) 2002-2008 The ROMS/TOMS Group ! ! Licensed under a MIT/X style license ! @@ -162,6 +162,9 @@ SUBROUTINE lintrp (n,x,y,ni,xi,yi) integer i, ii, j, n, ni real*8 d1, d2 real*8 x(n), y(n), xi(ni), yi(ni) +!f2py intent(out) yi +!f2py intent(hide) n +!f2py intent(hide) ni ! !----------------------------------------------------------------------- ! Begin executable code. 
@@ -195,7 +198,6 @@ SUBROUTINE lintrp (n,x,y,ni,xi,yi) ! SUBROUTINE spline (x,y,n,yp1,ypn,y2) ! -!svn $Id: spline.F 159 2008-03-05 18:02:32Z arango $ !================================================== Hernan G. Arango === ! Copyright (c) 2002-2008 The ROMS/TOMS Group ! ! Licensed under a MIT/X style license ! @@ -268,6 +270,8 @@ SUBROUTINE spline (x,y,n,yp1,ypn,y2) parameter (nmax=10000) real*8 p, qn, sig, un, ypn, yp1 real*8 x(n), y(n), y2(n), u(nmax) +!f2py intent(out) :: y2 +!f2py intent(hide) :: n ! !----------------------------------------------------------------------- ! Begin excutable code. @@ -329,7 +333,6 @@ SUBROUTINE spline (x,y,n,yp1,ypn,y2) ! SUBROUTINE splint (x,y,y2,n,xx,yy,dydx) ! -!svn $Id: splint.F 159 2008-03-05 18:02:32Z arango $ !================================================== Hernan G. Arango === ! Copyright (c) 2002-2008 The ROMS/TOMS Group ! ! Licensed under a MIT/X style license ! @@ -400,6 +403,8 @@ SUBROUTINE splint (x,y,y2,n,xx,yy,dydx) integer k, khi, klo, n real*8 a, b, c, d, dydx, e, f, h, xx, yy real*8 x(n), y(n), y2(n) +!f2py intent(out) :: yy +!f2py intent(hide) :: n ! !----------------------------------------------------------------------- ! Begin executable code. @@ -454,7 +459,9 @@ SUBROUTINE get_bottom (bottom,var,mask,im,jm,km,spval) real*8 spval, bot real*8 var(km,jm,im) real*8 mask(jm,im), bottom(jm,im) -cf2py intent(out) bottom +!f2py intent(out) bottom +!f2py intent(hide) :: jm +!f2py intent(hide) :: im DO j=1,jm DO i=1,im @@ -490,7 +497,9 @@ SUBROUTINE get_surface (surface,var,mask,im,jm,km,spval) real*8 spval, surf real*8 var(km,jm,im) real*8 mask(jm,im), surface(jm,im) -cf2py intent(out) surface +!f2py intent(out) surface +!f2py intent(hide) :: jm +!f2py intent(hide) :: im DO j=1,jm DO i=1,im diff --git a/pyroms/pyroms/src/iso.f b/pyroms/pyroms/src/iso.f index 4a045d5..a5a515f 100644 --- a/pyroms/pyroms/src/iso.f +++ b/pyroms/pyroms/src/iso.f @@ -1,8 +1,8 @@ -ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -c SUBROUTINE INTEGRATE -ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +!cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +! SUBROUTINE INTEGRATE +!cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc subroutine integrate(z_w,q,z_iso,iqu,iql,L,M,N) - + implicit none integer L, M, N real*8 z_w(N+1,M,L) @@ -10,12 +10,15 @@ subroutine integrate(z_w,q,z_iso,iqu,iql,L,M,N) real*8 q(N,M,L) real*8 iqu(M,L) real*8 iql(M,L) -cf2py intent(out) iqu -cf2py intent(out) iql +!f2py intent(out) iqu +!f2py intent(out) iql +!f2py intent(hide) :: L +!f2py intent(hide) :: M +!f2py intent(hide) :: N integer i, j, k real*8 dz(N) real*8 dzp - + do i=1,L do j=1,M do k=1,N @@ -37,27 +40,30 @@ subroutine integrate(z_w,q,z_iso,iqu,iql,L,M,N) enddo enddo enddo - + return end -ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -c SUBROUTINE SURFACE -ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +!cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +! SUBROUTINE SURFACE +!cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc subroutine surface(z, q, q0, z_iso, L, M, N) -c Assume q is sorted - +! 
Assume q is sorted + implicit none integer L, M, N real*8 z(N,M,L) real*8 q(N,M,L) real*8 q0(M,L) real*8 z_iso(M,L) -cf2py intent(out) z_iso +!f2py intent(out) z_iso +!f2py intent(hide) :: L +!f2py intent(hide) :: M +!f2py intent(hide) :: N integer i, j, k real*8 dz, dq, dq0 - + do i=1,L do j=1,M z_iso(j,i)=1.0d20 ! default value - isoline not in profile @@ -72,26 +78,29 @@ subroutine surface(z, q, q0, z_iso, L, M, N) enddo enddo enddo - + return end -cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -c SUBROUTINE ZSLICE -cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +!ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +! SUBROUTINE ZSLICE +!ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc subroutine zslice (z,f3d,depths,f2d,vinterp,L,M,N) - + implicit none integer L, M, N, vinterp real*8 depths(M,L) real*8 f3d(N,M,L), z(N,M,L) real*8 f2d(M,L) -cf2py intent(out) f2d +!f2py intent(out) f2d +!f2py intent(hide) :: L +!f2py intent(hide) :: M +!f2py intent(hide) :: N integer i, j, k real*8 zk(N), fk(N) real*8 wk(N), dfdz(N) - -c Linear Interpolation. + +! Linear Interpolation. if (vinterp.eq.0) then do i=1,L do j=1,M @@ -106,7 +115,7 @@ subroutine zslice (z,f3d,depths,f2d,vinterp,L,M,N) endif enddo enddo -c Cubic spline interpolation. +! Cubic spline interpolation. else if (vinterp.eq.1) then do i=1,L do j=1,M @@ -123,39 +132,42 @@ subroutine zslice (z,f3d,depths,f2d,vinterp,L,M,N) enddo enddo endif - + return end - - - - + + + + subroutine lintrp (n,x,y,ni,xi,yi) -c -c======================================================================= -c Copyright (c) 1996 Rutgers University === -c======================================================================= -c === -c Given arrays X and Y of length N, which tabulate a function, === -c Y = F(X), with the Xs in ascending order, and given array === -c XI of lenght NI, this routine returns a linear interpolated === -c array YI. === -c === -c======================================================================= -c -c----------------------------------------------------------------------- -c Define local variable. -c----------------------------------------------------------------------- -c +! +!======================================================================= +! Copyright (c) 1996 Rutgers University === +!======================================================================= +! === +! Given arrays X and Y of length N, which tabulate a function, === +! Y = F(X), with the Xs in ascending order, and given array === +! XI of lenght NI, this routine returns a linear interpolated === +! array YI. === +! === +!======================================================================= +! +!----------------------------------------------------------------------- +! Define local variable. +!----------------------------------------------------------------------- +! implicit none integer i, ii, j, n, ni real*8 d1, d2 real*8 x(n), y(n), xi(ni), yi(ni) -c -c----------------------------------------------------------------------- -c Begin executable code. -c----------------------------------------------------------------------- -c +!f2py intent(out) yi +!f2py intent(hide) n +!f2py intent(hide) ni +! +!----------------------------------------------------------------------- +! Begin executable code. +!----------------------------------------------------------------------- +! 
do 30 j=1,ni if (xi(j).le.x(1)) then ii=1 @@ -179,41 +191,41 @@ subroutine lintrp (n,x,y,ni,xi,yi) subroutine spline (x,y,n,yp1,ypn,y2) -c -c======================================================================= -c Copyright (c) 1996 Rutgers University === -c======================================================================= -c === -c Given X, Y of length N containing a tabulated function, Y=f(X), === -c with the Xs in ascending order, and given values Yp1 and Ypn === -c for the first derivative of the interpolating function at points === -c 1 and N, respectively this routine returns an array Y2 of length === -c N which contains the second derivatives of the interpolating === -c function at the tabulated points X. If Yp1 and/or Ypn are equal === -c to 1.0E+30 or larger, the routine is signalled to set the === -c corresponding boundary condition for a natural spline, with zero === -c second derivative on that boundary. === -c === -c Reference : === -c === -c Press, W.H, B.P. Flannery, S.A. Teukolsky, and W.T. Vetterling, === -c 1986: Numerical Recipes, the art of scientific computing. === -c Cambridge University Press. === -c === -c======================================================================= -c -c----------------------------------------------------------------------- -c Define global data. -c----------------------------------------------------------------------- -c - real*8 cm1,cm3,c0,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c20,c25,c50, +! +!======================================================================= +! Copyright (c) 1996 Rutgers University === +!======================================================================= +! === +! Given X, Y of length N containing a tabulated function, Y=f(X), === +! with the Xs in ascending order, and given values Yp1 and Ypn === +! for the first derivative of the interpolating function at points === +! 1 and N, respectively this routine returns an array Y2 of length === +! N which contains the second derivatives of the interpolating === +! function at the tabulated points X. If Yp1 and/or Ypn are equal === +! to 1.0E+30 or larger, the routine is signalled to set the === +! corresponding boundary condition for a natural spline, with zero === +! second derivative on that boundary. === +! === +! Reference : === +! === +! Press, W.H, B.P. Flannery, S.A. Teukolsky, and W.T. Vetterling, === +! 1986: Numerical Recipes, the art of scientific computing. === +! Cambridge University Press. === +! === +!======================================================================= +! +!----------------------------------------------------------------------- +! Define global data. +!----------------------------------------------------------------------- +! + real*8 cm1,cm3,c0,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c20,c25,c50, & c90,c100,c180,c200,c255,c300,c360,c366,c500,c1000,c5000, & c10000,c1em9,c1em10,c1em12,c1em20,c1ep30,p006,p009,p035, & p015,p012,p08,p06,p5,p25,p75,p98,r3,r10,r20,r33,r35,r40, & r50,r80,r100,r200,r250,r400,r1000 real*8 day2sec,deg2rad,grav,cm2m,m2cm,m2km,pi,rad2deg,re,root2, & sec2day,spval0,spval1,spval2,spvgeo -c +! parameter (cm1=-1.0,cm3=-3.0,c0=0.0,c1=1.0,c2=2.0,c3=3.0,c4=4.0, & c5=5.0,c6=6.0,c7=7.0,c8=8.0,c9=9.0,c10=10,c11=11.0, & c20=20.0,c25=25.0,c50=50.0,c90=90.0,c100=100.0, @@ -233,30 +245,32 @@ subroutine spline (x,y,n,yp1,ypn,y2) & spval2=0.99e+30) parameter (deg2rad=pi/c180,rad2deg=c180/pi) -c -c----------------------------------------------------------------------- -c Define local data. 
Change NMAX as desired to be the largest -c anticipated value of N. -c----------------------------------------------------------------------- -c +! +!----------------------------------------------------------------------- +! Define local data. Change NMAX as desired to be the largest +! anticipated value of N. +!----------------------------------------------------------------------- +! integer i, k, n, nmax parameter (nmax=10000) real*8 p, qn, sig, un, ypn, yp1 real*8 x(n), y(n), y2(n), u(nmax) -c -c----------------------------------------------------------------------- -c Begin excutable code. -c----------------------------------------------------------------------- -c +!f2py intent(out) :: y2 +!f2py intent(hide) :: n +! +!----------------------------------------------------------------------- +! Begin excutable code. +!----------------------------------------------------------------------- +! if (n.gt.nmax) then print 10, n,nmax 10 format(/' SPLINE: underdimensioned array, N, NMAX = ',2i5) call crash ('SPLINE',1) endif -c -c The lower boundary condition is set either to be "natural" or else -c to have a specified first derivative. -c +! +! The lower boundary condition is set either to be "natural" or else +! to have a specified first derivative. +! if (yp1.gt.spval2) then y2(1)=c0 u(1)=c0 @@ -264,10 +278,10 @@ subroutine spline (x,y,n,yp1,ypn,y2) y2(1)=-p5 u(1)=(c3/(x(2)-x(1)))*((y(2)-y(1))/(x(2)-x(1))-yp1) endif -c -c This is the decomposition loop of the tridiagonal algorithm. Y2 and -c U are used for temporary storage of the decomposition factors. -c +! +! This is the decomposition loop of the tridiagonal algorithm. Y2 and +! U are used for temporary storage of the decomposition factors. +! do i=2,n-1 sig=(x(i)-x(i-1))/(x(i+1)-x(i-1)) p=sig*y2(i-1)+c2 @@ -276,10 +290,10 @@ subroutine spline (x,y,n,yp1,ypn,y2) & (y(i)-y(i-1))/(x(i)-x(i-1)))/ & (x(i+1)-x(i-1))-sig*u(i-1))/p enddo -c -c The upper boundary condition is set either to be "natural" or else -c to have a specified first derivative. -c +! +! The upper boundary condition is set either to be "natural" or else +! to have a specified first derivative. +! if (ypn.gt.spval2) then qn=c0 un=c0 @@ -288,9 +302,9 @@ subroutine spline (x,y,n,yp1,ypn,y2) un=(c3/(x(n)-x(n-1)))*(ypn-(y(n)-y(n-1))/(x(n)-x(n-1))) endif y2(n)=(un-qn*u(n-1))/(qn*y2(n-1)+c1) -c -c This is the back-substitution loop of the tridiagonal algorithm. -c +! +! This is the back-substitution loop of the tridiagonal algorithm. +! do k=n-1,1,-1 y2(k)=y2(k)*y2(k+1)+u(k) enddo @@ -298,33 +312,33 @@ subroutine spline (x,y,n,yp1,ypn,y2) end subroutine splint (x,y,y2,n,xx,yy,dydx) -c -c======================================================================= -c Copyright (c) 1996 Rutgers University === -c======================================================================= -c === -c Given arrays X and Y of length N, which tabulate a function, === -c Y=f(X), with the Xs in ascending order, and given the array === -c Y2 which contains the second derivative of the interpolating === -c function at the tabulated points X as computed by routine === -c SPLINE, and given a value XX, this routine returns a cubic- === -c spline interpolated value YY. === -c === -c Reference : === -c === -c Press, W.H, B.P. Flannery, S.A. Teukolsky, and W.T. Vetterling, === -c 1986: Numerical Recipes, the art of scientific computing. === -c Cambridge University Press. === -c === -c Modified by H.G. Arango (1989) to output the first derivative === -c DYDX at a given value XX. 
=== -c === -c======================================================================= -c -c----------------------------------------------------------------------- -c Define global data. -c----------------------------------------------------------------------- -c +! +!======================================================================= +! Copyright (c) 1996 Rutgers University === +!======================================================================= +! === +! Given arrays X and Y of length N, which tabulate a function, === +! Y=f(X), with the Xs in ascending order, and given the array === +! Y2 which contains the second derivative of the interpolating === +! function at the tabulated points X as computed by routine === +! SPLINE, and given a value XX, this routine returns a cubic- === +! spline interpolated value YY. === +! === +! Reference : === +! === +! Press, W.H, B.P. Flannery, S.A. Teukolsky, and W.T. Vetterling, === +! 1986: Numerical Recipes, the art of scientific computing. === +! Cambridge University Press. === +! === +! Modified by H.G. Arango (1989) to output the first derivative === +! DYDX at a given value XX. === +! === +!======================================================================= +! +!----------------------------------------------------------------------- +! Define global data. +!----------------------------------------------------------------------- +! implicit none real*8 cm1,cm3,c0,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c20,c25,c50, & c90,c100,c180,c200,c255,c300,c360,c366,c500,c1000,c5000, @@ -333,7 +347,7 @@ subroutine splint (x,y,y2,n,xx,yy,dydx) & r50,r80,r100,r200,r250,r400,r1000 real*8 day2sec,deg2rad,grav,cm2m,m2cm,m2km,pi,rad2deg,re,root2, & sec2day,spval0,spval1,spval2,spvgeo -c +! parameter (cm1=-1.0,cm3=-3.0,c0=0.0,c1=1.0,c2=2.0,c3=3.0,c4=4.0, & c5=5.0,c6=6.0,c7=7.0,c8=8.0,c9=9.0,c10=10,c11=11.0, & c20=20.0,c25=25.0,c50=50.0,c90=90.0,c100=100.0, @@ -353,21 +367,23 @@ subroutine splint (x,y,y2,n,xx,yy,dydx) & spval2=0.99e+30) parameter (deg2rad=pi/c180,rad2deg=c180/pi) -c -c----------------------------------------------------------------------- -c Define local data. -c----------------------------------------------------------------------- -c +! +!----------------------------------------------------------------------- +! Define local data. +!----------------------------------------------------------------------- +! integer k, khi, klo, n real*8 a, b, c, d, dydx, e, f, h, xx, yy real*8 x(n), y(n), y2(n) -c -c----------------------------------------------------------------------- -c Begin executable code. -c----------------------------------------------------------------------- -c -c Found the right place of XX in the table by means of bisection. -c +!f2py intent(out) :: yy +!f2py intent(hide) :: n +! +!----------------------------------------------------------------------- +! Begin executable code. +!----------------------------------------------------------------------- +! +! Found the right place of XX in the table by means of bisection. +! klo=1 khi=n 10 if ((khi-klo).gt.1) then @@ -379,17 +395,17 @@ subroutine splint (x,y,y2,n,xx,yy,dydx) endif goto 10 endif -c -c KLO and KHI now bracket the input value XX. -c +! +! KLO and KHI now bracket the input value XX. +! h=x(khi)-x(klo) if (h.eq.c0) then print *, ' SPLINT: bad X input, they must be distinct.' call crash ('SPLINT',1) endif -c -c Evaluate cubic spline polynomial. -c +! +! Evaluate cubic spline polynomial. +! 
a=(x(khi)-xx)/h b=(xx-x(klo))/h c=(a*a*a-a)*(h*h)/c6 @@ -400,7 +416,7 @@ subroutine splint (x,y,y2,n,xx,yy,dydx) dydx=(y(khi)-y(klo))/h-e*y2(klo)+f*y2(khi) return end - + subroutine crash (string,ierr) integer ierr character*(*) string diff --git a/pyroms/pyroms/sta_grid.py b/pyroms/pyroms/sta_grid.py index 7a0e6dd..327ea7c 100644 --- a/pyroms/pyroms/sta_grid.py +++ b/pyroms/pyroms/sta_grid.py @@ -47,20 +47,20 @@ def get_Stations_hgrid(gridid, sta_file): #Get horizontal grid if ((spherical == 0) or (spherical == 'F')): #cartesian grid - print 'Load cartesian grid from file' - if 'x_rho' in nc.variables.keys() and 'y_rho' in nc.variables.keys(): + print('Load cartesian grid from file') + if 'x_rho' in list(nc.variables.keys()) and 'y_rho' in list(nc.variables.keys()): x_rho = nc.variables['x_rho'][:] y_rho = nc.variables['y_rho'][:] try: angle = nc.variables['angle'][:] except: angle = np.zeros(x_rho.shape) else: - raise ValueError, 'NetCDF file must contain x_rho and y_rho \ - and possibly angle for a cartesian grid' + raise ValueError('NetCDF file must contain x_rho and y_rho \ + and possibly angle for a cartesian grid') x_rho = nc.variables['x_rho'][:] y_rho = nc.variables['y_rho'][:] - if 'angle' in nc.variables.keys(): + if 'angle' in list(nc.variables.keys()): angle = nc.variables['angle'][:] else: angle = None @@ -70,19 +70,19 @@ def get_Stations_hgrid(gridid, sta_file): else: #geographical grid - print 'Load geographical grid from file' + print('Load geographical grid from file') proj = Basemap(projection='merc', resolution=None, lat_0=0, lon_0=0) - if 'lon_rho' in nc.variables.keys() and 'lat_rho' in nc.variables.keys(): + if 'lon_rho' in list(nc.variables.keys()) and 'lat_rho' in list(nc.variables.keys()): lon_rho = nc.variables['lon_rho'][:] lat_rho = nc.variables['lat_rho'][:] else: - raise ValueError, 'NetCDF file must contain lon_rho and lat_rho \ - for a geographical grid' + raise ValueError('NetCDF file must contain lon_rho and lat_rho \ + for a geographical grid') lon_rho = nc.variables['lon_rho'][:] lat_rho = nc.variables['lat_rho'][:] - if 'angle' in nc.variables.keys(): + if 'angle' in list(nc.variables.keys()): angle = nc.variables['angle'][:] else: angle = None @@ -156,7 +156,7 @@ def write_nc_var(var, name, dimensions, long_name=None, units=None): if units is not None: nc.variables[name].units = units nc.variables[name][:] = var - print ' ... wrote ', name + print(' ... wrote ', name) if hasattr(grd.vgrid, 's_rho') is True and grd.vgrid.s_rho is not None: write_nc_var(grd.vgrid.theta_s, 'theta_s', (), 'S-coordinate surface control parameter') @@ -179,7 +179,7 @@ def write_nc_var(var, name, dimensions, long_name=None, units=None): nc.createVariable('spherical', 'c') nc.variables['spherical'].long_name = 'Grid type logical switch' nc.variables['spherical'][:] = grd.hgrid.spherical - print ' ... wrote ', 'spherical' + print(' ... 
wrote ', 'spherical') write_nc_var(grd.hgrid.angle_rho, 'angle', ('station'), 'angle between XI-axis and EAST', 'radians') diff --git a/pyroms/pyroms/sta_hgrid.py b/pyroms/pyroms/sta_hgrid.py index 8e84d83..b8e1b3a 100644 --- a/pyroms/pyroms/sta_hgrid.py +++ b/pyroms/pyroms/sta_hgrid.py @@ -5,7 +5,7 @@ import os import sys import ctypes -import cPickle +import pickle from warnings import warn from copy import deepcopy @@ -18,38 +18,38 @@ class Sta_CGrid(object): """ Stations Grid - + EXAMPLES: -------- - + >>> x = arange(8) >>> y = arange(8)*2-1 >>> grd = pyroms.grid.StaGrid(x, y) >>> print grd.x_rho [4.5 4.5 4.5 4.5 4.5 4.5 4.5] """ - + def __init__(self, x_rho, y_rho, angle_rho=None): - + assert np.ndim(x_rho)==1 and np.ndim(y_rho)==1 and \ - np.shape(x_rho)==np.shape(y_rho), \ + np.shape(x_rho)==np.shape(y_rho), \ 'x and y must be 2D arrays of the same size.' - + if np.any(np.isnan(x_rho)) or np.any(np.isnan(y_rho)): x_rho = np.ma.masked_where( (isnan(x_rho)) | (isnan(y_rho)) , x_rho) y_rho = np.ma.masked_where( (isnan(x_rho)) | (isnan(y_rho)) , y_rho) - + self.x_rho = x_rho self.y_rho = y_rho self.spherical = 'F' - + if angle_rho is None: self.angle_rho = np.zeros(len(self.y_rho)) else: self.angle_rho = angle_rho - + x = property(lambda self: self.x_rho, None, None, 'Return x_rho') y = property(lambda self: self.y_rho, None, None, 'Return y_rho') @@ -57,11 +57,11 @@ def __init__(self, x_rho, y_rho, angle_rho=None): class Sta_CGrid_geo(Sta_CGrid): """ Curvilinear Arakawa C-grid defined in geographic coordinates - + For a geographic grid, a projection may be specified, or The default projection for will be defined by the matplotlib.toolkits.Basemap projection: - + proj = Basemap(projection='merc', resolution=None, lat_ts=0.0) """ diff --git a/pyroms/pyroms/tools.py b/pyroms/pyroms/tools.py index a468015..69153f8 100644 --- a/pyroms/pyroms/tools.py +++ b/pyroms/pyroms/tools.py @@ -13,7 +13,7 @@ def zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear'): optional switch: - Cpos='rho', 'u', 'v' or 'w' specify the C-grid position where - the variable rely + the variable rely - vert=True/False If True, return the position of the verticies - mode='linear' or 'spline' specify the type of interpolation @@ -30,7 +30,7 @@ def zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear'): imode=1 else: imode=0 - raise Warning, '%s not supported, defaulting to linear' % mode + raise Warning('%s not supported, defaulting to linear' % mode) # compute the depth on Arakawa-C grid position @@ -111,8 +111,8 @@ def zslice(var, depth, grd, Cpos='rho', vert=False, mode='linear'): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) assert len(z.shape) == 3, 'z must be 3D' assert len(var.shape) == 3, 'var must be 3D' @@ -136,8 +136,8 @@ def sslice(var, sindex, grd, Cpos='rho', vert=False): sslice, lon, lat = sslice(var, sindex, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - vert=True/False If True, return the position of the verticies - mode='linear' or 'spline' specify the type of interpolation @@ -226,8 +226,8 @@ def sslice(var, sindex, grd, Cpos='rho', vert=False): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. 
Valid Arakawa-C are \ + rho, u or v.' % Cpos) assert len(var.shape) == 3, 'var must be 3D' @@ -246,7 +246,7 @@ def islice(var, iindex, grd, Cpos='rho', vert=False): optional switch: - Cpos='rho', 'u', 'v' or 'w' specify the C-grid position where - the variable rely + the variable rely - vert=True/False If True, return the position of the verticies @@ -352,8 +352,8 @@ def islice(var, iindex, grd, Cpos='rho', vert=False): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) # get constant-i slice vari = var[:,:,iindex] @@ -374,7 +374,7 @@ def jslice(var, jindex, grd, Cpos='rho', vert=False): optional switch: - Cpos='rho', 'u', 'v' or 'w' specify the C-grid position where - the variable rely + the variable rely - vert=True/False If True, return the position of the verticies @@ -479,8 +479,8 @@ def jslice(var, jindex, grd, Cpos='rho', vert=False): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) # get constant-j slice varj = var[:,jindex,:] @@ -501,8 +501,8 @@ def isoslice(var,prop,isoval, grd, Cpos='rho', masking=True, vert=False): isoslice, lon, lat = isoslice(variable,property, isoval, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - masking=True mask the output if True - vert=True/False If True, return the position of the verticies @@ -521,9 +521,9 @@ def isoslice(var,prop,isoval, grd, Cpos='rho', masking=True, vert=False): h_at_s30 = isoslice(z,s,30); # z at s == 30 """ if (len(var.squeeze().shape)<=2): - raise ValueError, 'variable must have at least two dimensions' + raise ValueError('variable must have at least two dimensions') if not prop.shape == var.shape: - raise ValueError, 'dimension of var and prop must be identical' + raise ValueError('dimension of var and prop must be identical') # compute the depth on Arakawa-C grid position @@ -603,8 +603,8 @@ def isoslice(var,prop,isoval, grd, Cpos='rho', masking=True, vert=False): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' 
% Cpos) prop = prop-isoval sz = np.shape(var) @@ -624,8 +624,8 @@ def isoslice(var,prop,isoval, grd, Cpos='rho', masking=True, vert=False): if masking: isoslice = np.ma.masked_where(zc.sum(axis=0)==0, isoslice) if all(isoslice.mask): - raise Warning, 'property==%f out of range (%f, %f)' % \ - (isoval, (prop+isoval).min(), (prop+isoval).max()) + raise Warning('property==%f out of range (%f, %f)' % \ + (isoval, (prop+isoval).min(), (prop+isoval).max())) isoslice = isoslice.reshape(sz[1:]) # mask land @@ -641,8 +641,8 @@ def transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, \ transect, z, lon, lat = transect(var, istart, iend, jstart, jend, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - vert=True/False If True, return the position of the verticies - spval special value @@ -727,8 +727,8 @@ def transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, \ mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) # Find the nearest point between P1 (imin,jmin) and P2 (imax, jmax) @@ -758,7 +758,7 @@ def transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, \ # Chose the strait line with the smallest slope if (abs(aj) <= 1 ): # Here, the best line is y(x) - print 'Here, the best line is y(x)' + print('Here, the best line is y(x)') # If i1 < i0 swap points and remember it has been swapped if (i1 < i0 ): i = i0 ; j = j0 @@ -793,7 +793,7 @@ def transect(var, istart, iend, jstart, jend, grd, Cpos='rho', vert=False, \ else: # Here, the best line is x(y) - print 'Here, the best line is x(y)' + print('Here, the best line is x(y)') # If j1 < j0 swap points and remember it has been swapped if (j1 < j0 ): i = i0 ; j = j0 @@ -905,8 +905,8 @@ def lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e37): lonslice, z, lon, lat = lonslice(var, longitude, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - vert=True/False If True, return the position of the verticies - spval special value @@ -930,17 +930,17 @@ def lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e37): lon = grd.hgrid.lon_rho lat = grd.hgrid.lat_rho else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' 
% Cpos) edge = np.concatenate((lon[1,1:-1], \ lon[1:-1,-2], \ lon[-2,-2:0:-1], \ lon[-2:0:-1,1])) - idx = np.concatenate((range(1,lon[0,:].shape[0]-1), \ - range(1,lon[:,-1].shape[0]-1), \ - range(1,lon[-1,::-1].shape[0]-1)[::-1], \ - range(1,lon[::-1,0].shape[0]-1)[::-1])) + idx = np.concatenate((list(range(1,lon[0,:].shape[0]-1)), \ + list(range(1,lon[:,-1].shape[0]-1)), \ + list(range(1,lon[-1,::-1].shape[0]-1))[::-1], \ + list(range(1,lon[::-1,0].shape[0]-1))[::-1])) d = np.zeros(edge.shape) for i in range (edge.shape[0]): @@ -953,7 +953,7 @@ def lonslice(var, longitude, grd, Cpos='rho', vert=False, spval=1e37): Mp, Lp = lon.shape if len(pt_idx) != 2: - raise ValueError, 'this function only works for simple quadrangle' + raise ValueError('this function only works for simple quadrangle') # determine is latitude ligne is crossing a i or j edge side = np.zeros(2) @@ -1008,8 +1008,8 @@ def latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e37): latslice, z, lon, lat = latslice(var, latitude, grd) optional switch: - - Cpos='rho', 'u' or 'v' specify the C-grid position where - the variable rely + - Cpos='rho', 'u' or 'v' specify the C-grid position where + the variable rely - vert=True/False If True, return the position of the verticies - spval special value @@ -1033,17 +1033,17 @@ def latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e37): lon = grd.hgrid.lon_rho lat = grd.hgrid.lat_rho else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) edge = np.concatenate((lat[1,1:-1], \ lat[1:-1,-2], \ lat[-2,-2:0:-1], \ lat[-2:0:-1,1])) - idx = np.concatenate((range(1,lat[0,:].shape[0]-1), \ - range(1,lat[:,-1].shape[0]-1), \ - range(1,lat[-1,::-1].shape[0]-1)[::-1], \ - range(1,lat[::-1,0].shape[0]-1)[::-1])) + idx = np.concatenate((list(range(1,lat[0,:].shape[0]-1)), \ + list(range(1,lat[:,-1].shape[0]-1)), \ + list(range(1,lat[-1,::-1].shape[0]-1))[::-1], \ + list(range(1,lat[::-1,0].shape[0]-1))[::-1])) d = np.zeros(edge.shape) for i in range (edge.shape[0]): @@ -1056,7 +1056,7 @@ def latslice(var, latitude, grd, Cpos='rho', vert=False, spval=1e37): Mp, Lp = lon.shape if len(pt_idx) != 2: - raise ValueError, 'this function only works for simple quadrangle' + raise ValueError('this function only works for simple quadrangle') # determine is latitude ligne is crossing a i or j edge side = np.zeros(2) @@ -1169,8 +1169,8 @@ def zlayer(var, grd, h1=None, h2=None, Cpos='rho', vert=False): mask = grd.hgrid.mask_rho[:] else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' 
% Cpos) @@ -1247,7 +1247,7 @@ def section_transport(u, v, grd, istart, iend, jstart, jend): # Chose the strait line with the smallest slope if (abs(aj) <= 1 ): # Here, the best line is y(x) - print 'Here, the best line is y(x)' + print('Here, the best line is y(x)') # If i1 < i0 swap points and remember it has been swapped if i1 < i0: i = i0 ; j = j0 @@ -1272,7 +1272,7 @@ def section_transport(u, v, grd, istart, iend, jstart, jend): else: # Here, the best line is x(y) - print 'Here, the best line is x(y)' + print('Here, the best line is x(y)') # If j1 < j0 swap points and remember it has been swapped if j1 < j0: i = i0 ; j = j0 @@ -1308,9 +1308,9 @@ def section_transport(u, v, grd, istart, iend, jstart, jend): # distance between 2 neighbour points d = abs(inear[k] - inear[k-1]) - if ( d > 1 ): + if ( d > 1 ): # intermediate points required if d>1 - neari = interm_pt(inear, k, ai, bi, aj, bj) + neari = interm_pt(inear, k, ai, bi, aj, bj) near.insert(nn,neari) nn=nn+1 @@ -1395,7 +1395,7 @@ def section_transport_z(u, v, grd, istart, iend, jstart, jend, h1=None, h2=None) # Chose the strait line with the smallest slope if (abs(aj) <= 1 ): # Here, the best line is y(x) - print 'Here, the best line is y(x)' + print('Here, the best line is y(x)') # If i1 < i0 swap points and remember it has been swapped if i1 < i0: i = i0 ; j = j0 @@ -1420,7 +1420,7 @@ def section_transport_z(u, v, grd, istart, iend, jstart, jend, h1=None, h2=None) else: # Here, the best line is x(y) - print 'Here, the best line is x(y)' + print('Here, the best line is x(y)') # If j1 < j0 swap points and remember it has been swapped if j1 < j0: i = i0 ; j = j0 @@ -1456,9 +1456,9 @@ def section_transport_z(u, v, grd, istart, iend, jstart, jend, h1=None, h2=None) # distance between 2 neighbour points d = abs(inear[k] - inear[k-1]) - if ( d > 1 ): + if ( d > 1 ): # intermediate points required if d>1 - neari = interm_pt(inear, k, ai, bi, aj, bj) + neari = interm_pt(inear, k, ai, bi, aj, bj) near.insert(nn,neari) nn=nn+1 @@ -1573,7 +1573,7 @@ def section_tracer_transport_z(u, v, tracer, grd, istart, iend, jstart, jend, h1 # Chose the strait line with the smallest slope if (abs(aj) <= 1 ): # Here, the best line is y(x) - print 'Here, the best line is y(x)' + print('Here, the best line is y(x)') # If i1 < i0 swap points and remember it has been swapped if i1 < i0: i = i0 ; j = j0 @@ -1598,7 +1598,7 @@ def section_tracer_transport_z(u, v, tracer, grd, istart, iend, jstart, jend, h1 else: # Here, the best line is x(y) - print 'Here, the best line is x(y)' + print('Here, the best line is x(y)') # If j1 < j0 swap points and remember it has been swapped if j1 < j0: i = i0 ; j = j0 @@ -1634,9 +1634,9 @@ def section_tracer_transport_z(u, v, tracer, grd, istart, iend, jstart, jend, h1 # distance between 2 neighbour points d = abs(inear[k] - inear[k-1]) - if ( d > 1 ): + if ( d > 1 ): # intermediate points required if d>1 - neari = interm_pt(inear, k, ai, bi, aj, bj) + neari = interm_pt(inear, k, ai, bi, aj, bj) near.insert(nn,neari) nn=nn+1 @@ -1724,88 +1724,88 @@ def section_tracer_transport_z(u, v, tracer, grd, istart, iend, jstart, jend, h1 def interm_pt(pnear, pk, pai, pbi, paj, pbj): ### FIND THE BEST INTERMEDIATE POINT ON A PATHWAY - # ----------------------------- - # pnear : vector of the position of the nearest point - # pk : current working index - # pai, pbi: slope and original ordinate of x(y) - # paj, pbj: slope and original ordinate of y(x) - # pneari : vector holding the position of intermediate point - # 
----------------------------- + # ----------------------------- + # pnear : vector of the position of the nearest point + # pk : current working index + # pai, pbi: slope and original ordinate of x(y) + # paj, pbj: slope and original ordinate of y(x) + # pneari : vector holding the position of intermediate point + # ----------------------------- # 1 - Compute intermediate point # Determine whether we use y(x) or x(y): if (abs(paj) <= 1): # y(x) - # possible intermediate point - ylptmp1 = pnear[pk-1] + 1 - ylptmp2 = pnear[pk-1] + (paj/abs(paj))*1j - # M is the candidate point: - zxm = np.real(ylptmp1) - zym = np.imag(ylptmp1) - za0 = paj - zb0 = pbj - # - za1 = -1./za0 - zb1 = zym-za1*zxm - # P is the projection of M in the strait line - zxp = -(zb1-zb0)/(za1-za0) - zyp = za0*zxp+zb0 - # zd1 is the distance MP - zd1 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + # possible intermediate point + ylptmp1 = pnear[pk-1] + 1 + ylptmp2 = pnear[pk-1] + (paj/abs(paj))*1j + # M is the candidate point: + zxm = np.real(ylptmp1) + zym = np.imag(ylptmp1) + za0 = paj + zb0 = pbj # - # M is the candidate point: - zxm = np.real(ylptmp2) - zym = np.imag(ylptmp2) - za1 = -1./za0 - zb1 = zym-za1*zxm - # P is the projection of M in the strait line - zxp = -(zb1-zb0)/(za1-za0) - zyp = za0*zxp+zb0 - # zd1 is the distance MP - zd2 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + za1 = -1./za0 + zb1 = zym-za1*zxm + # P is the projection of M in the strait line + zxp = -(zb1-zb0)/(za1-za0) + zyp = za0*zxp+zb0 + # zd1 is the distance MP + zd1 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + # + # M is the candidate point: + zxm = np.real(ylptmp2) + zym = np.imag(ylptmp2) + za1 = -1./za0 + zb1 = zym-za1*zxm + # P is the projection of M in the strait line + zxp = -(zb1-zb0)/(za1-za0) + zyp = za0*zxp+zb0 + # zd1 is the distance MP + zd2 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + # + # choose the smallest (zd1,zd2) + if (zd2 <= zd1): + pneari = ylptmp2 + else: + pneari = ylptmp1 # - # choose the smallest (zd1,zd2) - if (zd2 <= zd1): - pneari = ylptmp2 - else: - pneari = ylptmp1 - # else: # x(y) - ylptmp1 = pnear[pk-1] + (pai/abs(pai)) - ylptmp2 = pnear[pk-1] + 1*1j - # M is the candidate point: - zxm = np.real(ylptmp1) - zym = np.imag(ylptmp1) - za0 = pai - zb0 = pbi - # - za1 = -1./za0 - zb1 = zxm-za1*zym - # P is the projection of M in the strait line - zyp = -(zb1-zb0)/(za1-za0) - zxp = za0*zyp+zb0 - # zd1 is the distance MP - zd1 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + ylptmp1 = pnear[pk-1] + (pai/abs(pai)) + ylptmp2 = pnear[pk-1] + 1*1j + # M is the candidate point: + zxm = np.real(ylptmp1) + zym = np.imag(ylptmp1) + za0 = pai + zb0 = pbi # - # M is the candidate point: - zxm = np.real(ylptmp2) - zym = np.imag(ylptmp2) - za1 = -1./za0 - zb1 = zxm-za1*zym - # P is the projection of M in the strait line - zyp = -(zb1-zb0)/(za1-za0) - zxp = za0*zyp+zb0 - # zd2 is the distance MP - zd2 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) + za1 = -1./za0 + zb1 = zxm-za1*zym + # P is the projection of M in the strait line + zyp = -(zb1-zb0)/(za1-za0) + zxp = za0*zyp+zb0 + # zd1 is the distance MP + zd1 = (zxm-zxp) * (zxm-zxp) + (zym-zyp) * (zym-zyp) # - # choose the smallest (zd1,zd2) - if (zd2 <= zd1): - pneari = ylptmp2 - else: - pneari = ylptmp1 - + # M is the candidate point: + zxm = np.real(ylptmp2) + zym = np.imag(ylptmp2) + za1 = -1./za0 + zb1 = zxm-za1*zym + # P is the projection of M in the strait line + zyp = -(zb1-zb0)/(za1-za0) + zxp = za0*zyp+zb0 + # zd2 is the distance MP + zd2 = (zxm-zxp) * 
(zxm-zxp) + (zym-zyp) * (zym-zyp) + # + # choose the smallest (zd1,zd2) + if (zd2 <= zd1): + pneari = ylptmp2 + else: + pneari = ylptmp1 + return pneari @@ -1828,8 +1828,8 @@ def hindices(lon, lat, grd, Cpos='rho', rectangular=0, spval=1e37): latg = grd.hgrid.lat_rho angle = grd.hgrid.angle_rho else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) if type(grd).__name__ == 'CGrid_geo': spherical = grd.spherical @@ -1846,8 +1846,8 @@ def hindices(lon, lat, grd, Cpos='rho', rectangular=0, spval=1e37): latg = grd.lat_rho angle = grd.angle_rho else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' % Cpos) lon = np.matrix(lon) diff --git a/pyroms/pyroms/utility.py b/pyroms/pyroms/utility.py index 4f98d03..a58d578 100644 --- a/pyroms/pyroms/utility.py +++ b/pyroms/pyroms/utility.py @@ -34,7 +34,7 @@ def get_lonlat(iindex, jindex, grd, Cpos='rho'): lon = grd.hgrid.lon_psi[:,:] lat = grd.hgrid.lat_psi[:,:] else: - raise Warning, '%s bad position. Cpos must be rho, psi, u or v.' % Cpos + raise Warning('%s bad position. Cpos must be rho, psi, u or v.' % Cpos) return lon[jindex, iindex], lat[jindex, iindex] @@ -60,7 +60,7 @@ def get_ij(longitude, latitude, grd, Cpos='rho'): lon = grd.hgrid.lon_psi[:,:] lat = grd.hgrid.lat_psi[:,:] else: - raise Warning, '%s bad position. Cpos must be rho, psi, u or v.' % Cpos + raise Warning('%s bad position. Cpos must be rho, psi, u or v.' % Cpos) lon = lon[:,:] - longitude lat = lat[:,:] - latitude @@ -91,7 +91,7 @@ def find_nearestgridpoints(longitude, latitude, grd, Cpos='rho'): lon = grd.hgrid.lon_vert[:,:] lat = grd.hgrid.lat_vert[:,:] else: - raise Warning, '%s bad position. Cpos must be rho, u or v.' % Cpos + raise Warning('%s bad position. Cpos must be rho, u or v.' % Cpos) if type(grd).__name__ == 'CGrid_geo': @@ -109,7 +109,7 @@ def find_nearestgridpoints(longitude, latitude, grd, Cpos='rho'): lon = grd.lon_vert[:,:] lat = grd.lat_vert[:,:] else: - raise Warning, '%s bad position. Cpos must be rho, u or v.' % Cpos + raise Warning('%s bad position. Cpos must be rho, u or v.' % Cpos) dlon = lon[:,:] - longitude @@ -168,7 +168,7 @@ def find_nearestgridpoints(longitude, latitude, grd, Cpos='rho'): inside = mpl.path.Path(verts).contains_point([longitude, latitude]) if inside == 0: - raise ValueError, 'well where is it then?' 
+ raise ValueError('well where is it then?') iindex = iindex[:2] jindex = jindex[1:3] @@ -190,7 +190,7 @@ def get_coast_from_map(map): for k in range(kk): ll = len(map.coastsegs[k]) for l in range(ll): - c = map(map.coastsegs[k][l][0], map.coastsegs[k][l][1], inverse=True) + c = list(map(map.coastsegs[k][l][0], map.coastsegs[k][l][1], inverse=True)) coast.append(c) coast.append((np.nan, np.nan)) @@ -326,7 +326,7 @@ def roms_varlist(option): varlist = (['h','s_rho','s_w','Cs_r','Cs_w', \ 'theta_s','theta_b','Tcline','hc']) else: - raise Warning, 'Unknow varlist id' + raise Warning('Unknow varlist id') return varlist @@ -343,7 +343,7 @@ def get_bottom(varz, mask, spval=1e37): bottom[:,:] = _interp.get_bottom(varz,mask,spval) - return np.int(bottom) + return bottom def get_surface(varz, mask, spval=1e37): @@ -384,7 +384,7 @@ def move2grid(varin, init_grid, final_grid): elif (init_grid == 'v' and final_grid == 'psi'): varout = 0.5 * (varin[:,1:] + varin[:,:-1]) else: - raise ValueError, 'Undefined combination for init_grid and final_grid' + raise ValueError('Undefined combination for init_grid and final_grid') elif ndim == 3: @@ -400,15 +400,15 @@ def move2grid(varin, init_grid, final_grid): elif (init_grid == 'v' and final_grid == 'psi'): varout = 0.5 * (varin[:,:,1:] + varin[:,:,:-1]) else: - raise ValueError, 'Undefined combination for init_grid and final_grid' + raise ValueError('Undefined combination for init_grid and final_grid') else: - raise ValueError, 'varin must be 2D or 3D' + raise ValueError('varin must be 2D or 3D') return varout -def get_date_tag(roms_time, ref=(2006, 01, 01), format="%d %b %Y at %H:%M:%S"): +def get_date_tag(roms_time, ref=(2006, 0o1, 0o1), format="%d %b %Y at %H:%M:%S"): ''' tag = get_date_tag(roms_time) diff --git a/pyroms/pyroms/vgrid.py b/pyroms/pyroms/vgrid.py index c09f21a..152df3b 100644 --- a/pyroms/pyroms/vgrid.py +++ b/pyroms/pyroms/vgrid.py @@ -289,21 +289,17 @@ def __init__(self, h, theta_b, theta_s, Tcline, N, hraw=None, zeta=None): self.z_r = z_r(self.h, self.hc, self.N, self.s_rho, self.Cs_r, self.zeta, self.Vtrans) self.z_w = z_w(self.h, self.hc, self.Np, self.s_w, self.Cs_w, self.zeta, self.Vtrans) - def _get_s_rho(self): - lev = np.arange(1,self.N+1,1) - s = -(lev * lev - 2 * lev * self.N + lev + self.N * self.N - self.N) / \ - (self.N * self.N - self.N) - \ - 0.01 * (lev * lev - lev * self.N) / (self.c1 - self.N) -# (self.c1 * self.N * self.N - self.N) - \ - self.s_rho = s + lev = np.arange(1, self.N+1) - .5 + self.s_rho = -(lev * lev - 2 * lev * self.N + lev + self.N * self.N - self.N) / \ + (1.0 * self.N * self.N - self.N) - \ + 0.01 * (lev * lev - lev * self.N) / (1.0 - self.N) def _get_s_w(self): lev = np.arange(0,self.Np,1) s = -(lev * lev - 2 * lev * self.N + lev + self.N * self.N - self.N) / \ (self.N * self.N - self.N) - \ 0.01 * (lev * lev - lev * self.N) / (self.c1 - self.N) -# (self.c1 * self.N * self.N - self.N) - \ self.s_w = s def _get_Cs_r(self): @@ -417,7 +413,7 @@ def __getitem__(self, key): for k in range(self.Np): z0 = self.hc * self.s_w[k] + (self.h - self.hc) * self.Cs_w[k] z_w[n,k,:] = z0 + zeta[n,:] * (1.0 + z0 / self.h) - elif self.Vtrans == 2 or self.Vtrans == 4: + elif self.Vtrans == 2 or self.Vtrans == 4 or self.Vtrans == 5: for n in range(ti): for k in range(self.Np): z0 = (self.hc * self.s_w[k] + self.h * self.Cs_w[k]) / \ @@ -440,7 +436,7 @@ def __init__(self, h, depth, N): self.N = int(N) ndim = len(h.shape) -# print h.shape, ndim +# print(h.shape, ndim) if ndim == 2: Mm, Lm = h.shape diff --git 
a/pyroms/setup.py b/pyroms/setup.py index 92ba2d1..3f9739a 100644 --- a/pyroms/setup.py +++ b/pyroms/setup.py @@ -33,14 +33,14 @@ get_ROMS_hgrid get_ROMS_vgrid get_ROMS_grid - write_ROMS_grid + write_ROMS_grid io - wrapper for netCDF4 Dataset MFDataset cf - CF compliant files tools - time + time tools - Tools specific to the Regional Ocean Modeling System roms2z @@ -55,7 +55,7 @@ latslice section_transport - utility - Some basic tools + utility - Some basic tools get_lonlat get_ij roms_varlist @@ -76,25 +76,16 @@ Topic :: Software Development :: Libraries :: Python Modules """ -from numpy.distutils.core import Extension - -iso = Extension(name = '_iso', - sources = ['pyroms/src/iso.f']) - -interp = Extension(name = '_interp', - sources = ['pyroms/src/interp.f']) - -obs_interp = Extension(name = '_obs_interp', - sources = ['pyroms/src/obs_interp.f']) - -remapping = Extension(name = '_remapping', - sources = ['pyroms/src/remapping.f']) - -remapping_fast = Extension(name = '_remapping_fast', - sources = ['pyroms/src/remapping_fast.f']) - -remapping_fast_weighted = Extension(name = '_remapping_fast_weighted', - sources = ['pyroms/src/remapping_fast_weighted.f']) +def configuration(parent_package='', top_path=None): + from numpy.distutils.misc_util import Configuration + config = Configuration('pyroms', parent_package, top_path) + config.add_extension('_interp', sources = ['pyroms/src/interp.f']) + config.add_extension('_obs_interp', sources = ['pyroms/src/obs_interp.f']) + config.add_extension('_remapping', sources = ['pyroms/src/remapping.f']) + config.add_extension('_remapping_fast', sources = ['pyroms/src/remapping_fast.f']) + config.add_extension('_remapping_fast_weighted', sources = ['pyroms/src/remapping_fast_weighted.f']) + config.add_extension('_iso', sources = ['pyroms/src/iso.f']) + return config doclines = __doc__.split("\n") @@ -104,14 +95,11 @@ version = '0.1.0', description = doclines[0], long_description = "\n".join(doclines[2:]), - author = "Frederic Castruccio", - author_email = "frederic@marine.rutgers.edu", - url = "ftp://marine.rutgers.edu/pub/frederic/pyroms/", + url = "https://github.com/ESMG/", packages = ['pyroms', 'pyroms.remapping', 'pyroms.extern'], license = 'BSD', platforms = ["any"], - ext_modules = [iso, interp, obs_interp, remapping, remapping_fast, remapping_fast_weighted], - classifiers = filter(None, classifiers.split("\n")), ) + setup(**configuration(top_path='').todict()) diff --git a/pyroms_toolbox/.doxygen b/pyroms_toolbox/.doxygen new file mode 100644 index 0000000..b87a94b --- /dev/null +++ b/pyroms_toolbox/.doxygen @@ -0,0 +1,1519 @@ +# Doxyfile 1.6.1 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. 
See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = pycnal_toolbox + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = YES + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. 
+ +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". 
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = YES + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. 
+ +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = YES + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. 
+# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = YES + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. 
+ +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the (brief and detailed) documentation of class members so that constructors and destructors are listed first. If set to NO (the default) the constructors will appear in the respective orders defined by SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = doxygen.log + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = pycnal_toolbox + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = YES + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = NO + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = YES + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = + +# If the HTML_TIMESTAMP tag is set to YES then the generated HTML +# documentation will contain the timesstamp. + +HTML_TIMESTAMP = NO + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = NO + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. 
+# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = NO + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to YES, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). +# Windows users are probably better off using the HTML help feature. + +GENERATE_TREEVIEW = YES + +# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list. + +USE_INLINE_TREES = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +# When the SEARCHENGINE tag is enable doxygen will generate a search box for the HTML output. The underlying search engine uses javascript +# and DHTML and should work on any modern browser. Note that when using HTML help (GENERATE_HTMLHELP) or Qt help (GENERATE_QHP) +# there is already a search function so this one should typically +# be disabled. + +SEARCHENGINE = YES + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. 
+ +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = letter + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. 
+# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = NO + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = YES + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. 
+ +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = YES + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = YES + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = NO + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = NO + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = NO + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 0 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = NO + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES diff --git a/pyroms_toolbox/docs/api-objects.txt b/pyroms_toolbox/docs/api-objects.txt deleted file mode 100644 index 5909d8f..0000000 --- a/pyroms_toolbox/docs/api-objects.txt +++ /dev/null @@ -1,264 +0,0 @@ -pyroms_toolbox pyroms_toolbox-module.html -pyroms_toolbox.shapiro2 pyroms_toolbox.shapiro_filter-module.html#shapiro2 -pyroms_toolbox.shapiro1 pyroms_toolbox.shapiro_filter-module.html#shapiro1 -pyroms_toolbox.BGrid_GFDL pyroms_toolbox.BGrid_GFDL-module.html -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL' pyroms_toolbox.BGrid_GFDL.BGrid_GFDL%27-module.html -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.get_nc_BGrid_GFDL pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html#get_nc_BGrid_GFDL -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.make_remap_BGrid_GFDL_file pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html#make_remap_BGrid_GFDL_file -pyroms_toolbox.BGrid_GFDL.flood' pyroms_toolbox.BGrid_GFDL.flood%27-module.html -pyroms_toolbox.BGrid_GFDL.flood'.flood pyroms_toolbox.BGrid_GFDL.flood%27-module.html#flood -pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj' pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj%27-module.html -pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'.get_Bgrid_proj pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj%27-module.html#get_Bgrid_proj -pyroms_toolbox.BGrid_GFDL.get_coast_line' pyroms_toolbox.BGrid_GFDL.get_coast_line%27-module.html -pyroms_toolbox.BGrid_GFDL.get_coast_line'.get_coast_line pyroms_toolbox.BGrid_GFDL.get_coast_line%27-module.html#get_coast_line -pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL' pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL%27-module.html -pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'.get_nc_BGrid_GFDL pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL%27-module.html#get_nc_BGrid_GFDL -pyroms_toolbox.BGrid_GFDL.make_remap_grid_file' pyroms_toolbox.BGrid_GFDL.make_remap_grid_file%27-module.html -pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'.make_remap_grid_file pyroms_toolbox.BGrid_GFDL.make_remap_grid_file%27-module.html#make_remap_grid_file -pyroms_toolbox.BGrid_GFDL.plot_coast_line' pyroms_toolbox.BGrid_GFDL.plot_coast_line%27-module.html -pyroms_toolbox.BGrid_GFDL.plot_coast_line'.plot_coast_line pyroms_toolbox.BGrid_GFDL.plot_coast_line%27-module.html#plot_coast_line -pyroms_toolbox.N2' pyroms_toolbox.N2%27-module.html -pyroms_toolbox.N2'.N2 pyroms_toolbox.N2%27-module.html#N2 -pyroms_toolbox.O2_saturation' pyroms_toolbox.O2_saturation%27-module.html -pyroms_toolbox.O2_saturation'.O2_saturation pyroms_toolbox.O2_saturation%27-module.html#O2_saturation -pyroms_toolbox._average pyroms_toolbox._average-module.html -pyroms_toolbox._average.avg3d pyroms_toolbox._average-module.html#avg3d -pyroms_toolbox._average.avg2d pyroms_toolbox._average-module.html#avg2d -pyroms_toolbox.average' pyroms_toolbox.average%27-module.html -pyroms_toolbox.average'.average pyroms_toolbox.average%27-module.html#average -pyroms_toolbox.average'.avg pyroms_toolbox.average%27-module.html#avg -pyroms_toolbox.change' pyroms_toolbox.change%27-module.html -pyroms_toolbox.change'.change 
pyroms_toolbox.change%27-module.html#change -pyroms_toolbox.get_coast_line' pyroms_toolbox.get_coast_line%27-module.html -pyroms_toolbox.get_coast_line'.get_coast_line pyroms_toolbox.get_coast_line%27-module.html#get_coast_line -pyroms_toolbox.isoview' pyroms_toolbox.isoview%27-module.html -pyroms_toolbox.isoview'.isoview pyroms_toolbox.isoview%27-module.html#isoview -pyroms_toolbox.iview' pyroms_toolbox.iview%27-module.html -pyroms_toolbox.iview'.iview pyroms_toolbox.iview%27-module.html#iview -pyroms_toolbox.jview' pyroms_toolbox.jview%27-module.html -pyroms_toolbox.jview'.jview pyroms_toolbox.jview%27-module.html#jview -pyroms_toolbox.latview' pyroms_toolbox.latview%27-module.html -pyroms_toolbox.latview'.latview pyroms_toolbox.latview%27-module.html#latview -pyroms_toolbox.lonview' pyroms_toolbox.lonview%27-module.html -pyroms_toolbox.lonview'.lonview pyroms_toolbox.lonview%27-module.html#lonview -pyroms_toolbox.lsq_phase_amplitude' pyroms_toolbox.lsq_phase_amplitude%27-module.html -pyroms_toolbox.lsq_phase_amplitude'.lsq_phase_amplitude pyroms_toolbox.lsq_phase_amplitude%27-module.html#lsq_phase_amplitude -pyroms_toolbox.nc_create_roms_bdry_file' pyroms_toolbox.nc_create_roms_bdry_file%27-module.html -pyroms_toolbox.nc_create_roms_bdry_file'.nc_create_roms_bdry_file pyroms_toolbox.nc_create_roms_bdry_file%27-module.html#nc_create_roms_bdry_file -pyroms_toolbox.nc_create_roms_file' pyroms_toolbox.nc_create_roms_file%27-module.html -pyroms_toolbox.nc_create_roms_file'.nc_create_roms_file pyroms_toolbox.nc_create_roms_file%27-module.html#nc_create_roms_file -pyroms_toolbox.plot_coast_line' pyroms_toolbox.plot_coast_line%27-module.html -pyroms_toolbox.plot_coast_line'.plot_coast_line pyroms_toolbox.plot_coast_line%27-module.html#plot_coast_line -pyroms_toolbox.plot_mask' pyroms_toolbox.plot_mask%27-module.html -pyroms_toolbox.plot_mask'.plot_mask pyroms_toolbox.plot_mask%27-module.html#plot_mask -pyroms_toolbox.quiver' pyroms_toolbox.quiver%27-module.html -pyroms_toolbox.quiver'.FLOATING_POINT_SUPPORT pyroms_toolbox.quiver%27-module.html#FLOATING_POINT_SUPPORT -pyroms_toolbox.quiver'.True_ pyroms_toolbox.quiver%27-module.html#True_ -pyroms_toolbox.quiver'.cosh pyroms_toolbox.quiver%27-module.html#cosh -pyroms_toolbox.quiver'.FPE_OVERFLOW pyroms_toolbox.quiver%27-module.html#FPE_OVERFLOW -pyroms_toolbox.quiver'.index_exp pyroms_toolbox.quiver%27-module.html#index_exp -pyroms_toolbox.quiver'.power pyroms_toolbox.quiver%27-module.html#power -pyroms_toolbox.quiver'.ERR_WARN pyroms_toolbox.quiver%27-module.html#ERR_WARN -pyroms_toolbox.quiver'.fmax pyroms_toolbox.quiver%27-module.html#fmax -pyroms_toolbox.quiver'.sinh pyroms_toolbox.quiver%27-module.html#sinh -pyroms_toolbox.quiver'.trunc pyroms_toolbox.quiver%27-module.html#trunc -pyroms_toolbox.quiver'.less_equal pyroms_toolbox.quiver%27-module.html#less_equal -pyroms_toolbox.quiver'.BUFSIZE pyroms_toolbox.quiver%27-module.html#BUFSIZE -pyroms_toolbox.quiver'.divide pyroms_toolbox.quiver%27-module.html#divide -pyroms_toolbox.quiver'.sign pyroms_toolbox.quiver%27-module.html#sign -pyroms_toolbox.quiver'.bitwise_and pyroms_toolbox.quiver%27-module.html#bitwise_and -pyroms_toolbox.quiver'.deg2rad pyroms_toolbox.quiver%27-module.html#deg2rad -pyroms_toolbox.quiver'.negative pyroms_toolbox.quiver%27-module.html#negative -pyroms_toolbox.quiver'.MAXDIMS pyroms_toolbox.quiver%27-module.html#MAXDIMS -pyroms_toolbox.quiver'.true_divide pyroms_toolbox.quiver%27-module.html#true_divide -pyroms_toolbox.quiver'.Inf pyroms_toolbox.quiver%27-module.html#Inf 
-pyroms_toolbox.quiver'.quiver pyroms_toolbox.quiver%27-module.html#quiver -pyroms_toolbox.quiver'.infty pyroms_toolbox.quiver%27-module.html#infty -pyroms_toolbox.quiver'.logical_or pyroms_toolbox.quiver%27-module.html#logical_or -pyroms_toolbox.quiver'.minimum pyroms_toolbox.quiver%27-module.html#minimum -pyroms_toolbox.quiver'.WRAP pyroms_toolbox.quiver%27-module.html#WRAP -pyroms_toolbox.quiver'.tan pyroms_toolbox.quiver%27-module.html#tan -pyroms_toolbox.quiver'.absolute pyroms_toolbox.quiver%27-module.html#absolute -pyroms_toolbox.quiver'.sin pyroms_toolbox.quiver%27-module.html#sin -pyroms_toolbox.quiver'.s_ pyroms_toolbox.quiver%27-module.html#s_ -pyroms_toolbox.quiver'.logaddexp pyroms_toolbox.quiver%27-module.html#logaddexp -pyroms_toolbox.quiver'.left_shift pyroms_toolbox.quiver%27-module.html#left_shift -pyroms_toolbox.quiver'.CLIP pyroms_toolbox.quiver%27-module.html#CLIP -pyroms_toolbox.quiver'.degrees pyroms_toolbox.quiver%27-module.html#degrees -pyroms_toolbox.quiver'.FPE_INVALID pyroms_toolbox.quiver%27-module.html#FPE_INVALID -pyroms_toolbox.quiver'.logaddexp2 pyroms_toolbox.quiver%27-module.html#logaddexp2 -pyroms_toolbox.quiver'.greater pyroms_toolbox.quiver%27-module.html#greater -pyroms_toolbox.quiver'.PZERO pyroms_toolbox.quiver%27-module.html#PZERO -pyroms_toolbox.quiver'.radians pyroms_toolbox.quiver%27-module.html#radians -pyroms_toolbox.quiver'.fmod pyroms_toolbox.quiver%27-module.html#fmod -pyroms_toolbox.quiver'.ogrid pyroms_toolbox.quiver%27-module.html#ogrid -pyroms_toolbox.quiver'.r_ pyroms_toolbox.quiver%27-module.html#r_ -pyroms_toolbox.quiver'.ERR_RAISE pyroms_toolbox.quiver%27-module.html#ERR_RAISE -pyroms_toolbox.quiver'.remainder pyroms_toolbox.quiver%27-module.html#remainder -pyroms_toolbox.quiver'.expm1 pyroms_toolbox.quiver%27-module.html#expm1 -pyroms_toolbox.quiver'.newaxis pyroms_toolbox.quiver%27-module.html#newaxis -pyroms_toolbox.quiver'.arccos pyroms_toolbox.quiver%27-module.html#arccos -pyroms_toolbox.quiver'.rint pyroms_toolbox.quiver%27-module.html#rint -pyroms_toolbox.quiver'.arctan2 pyroms_toolbox.quiver%27-module.html#arctan2 -pyroms_toolbox.quiver'.little_endian pyroms_toolbox.quiver%27-module.html#little_endian -pyroms_toolbox.quiver'.ldexp pyroms_toolbox.quiver%27-module.html#ldexp -pyroms_toolbox.quiver'.logical_xor pyroms_toolbox.quiver%27-module.html#logical_xor -pyroms_toolbox.quiver'.exp2 pyroms_toolbox.quiver%27-module.html#exp2 -pyroms_toolbox.quiver'.False_ pyroms_toolbox.quiver%27-module.html#False_ -pyroms_toolbox.quiver'.arctanh pyroms_toolbox.quiver%27-module.html#arctanh -pyroms_toolbox.quiver'.typecodes pyroms_toolbox.quiver%27-module.html#typecodes -pyroms_toolbox.quiver'.sctypes pyroms_toolbox.quiver%27-module.html#sctypes -pyroms_toolbox.quiver'.not_equal pyroms_toolbox.quiver%27-module.html#not_equal -pyroms_toolbox.quiver'.typeNA pyroms_toolbox.quiver%27-module.html#typeNA -pyroms_toolbox.quiver'.conjugate pyroms_toolbox.quiver%27-module.html#conjugate -pyroms_toolbox.quiver'.ERR_LOG pyroms_toolbox.quiver%27-module.html#ERR_LOG -pyroms_toolbox.quiver'.right_shift pyroms_toolbox.quiver%27-module.html#right_shift -pyroms_toolbox.quiver'.isnan pyroms_toolbox.quiver%27-module.html#isnan -pyroms_toolbox.quiver'.multiply pyroms_toolbox.quiver%27-module.html#multiply -pyroms_toolbox.quiver'.logical_not pyroms_toolbox.quiver%27-module.html#logical_not -pyroms_toolbox.quiver'.nbytes pyroms_toolbox.quiver%27-module.html#nbytes -pyroms_toolbox.quiver'.FPE_UNDERFLOW pyroms_toolbox.quiver%27-module.html#FPE_UNDERFLOW 
-pyroms_toolbox.quiver'.frexp pyroms_toolbox.quiver%27-module.html#frexp -pyroms_toolbox.quiver'.SHIFT_OVERFLOW pyroms_toolbox.quiver%27-module.html#SHIFT_OVERFLOW -pyroms_toolbox.quiver'.NZERO pyroms_toolbox.quiver%27-module.html#NZERO -pyroms_toolbox.quiver'.ceil pyroms_toolbox.quiver%27-module.html#ceil -pyroms_toolbox.quiver'.isfinite pyroms_toolbox.quiver%27-module.html#isfinite -pyroms_toolbox.quiver'.SHIFT_UNDERFLOW pyroms_toolbox.quiver%27-module.html#SHIFT_UNDERFLOW -pyroms_toolbox.quiver'.rad2deg pyroms_toolbox.quiver%27-module.html#rad2deg -pyroms_toolbox.quiver'.sctypeDict pyroms_toolbox.quiver%27-module.html#sctypeDict -pyroms_toolbox.quiver'.NINF pyroms_toolbox.quiver%27-module.html#NINF -pyroms_toolbox.quiver'.ERR_DEFAULT2 pyroms_toolbox.quiver%27-module.html#ERR_DEFAULT2 -pyroms_toolbox.quiver'.cos pyroms_toolbox.quiver%27-module.html#cos -pyroms_toolbox.quiver'.arccosh pyroms_toolbox.quiver%27-module.html#arccosh -pyroms_toolbox.quiver'.equal pyroms_toolbox.quiver%27-module.html#equal -pyroms_toolbox.quiver'.bitwise_or pyroms_toolbox.quiver%27-module.html#bitwise_or -pyroms_toolbox.quiver'.invert pyroms_toolbox.quiver%27-module.html#invert -pyroms_toolbox.quiver'.UFUNC_PYVALS_NAME pyroms_toolbox.quiver%27-module.html#UFUNC_PYVALS_NAME -pyroms_toolbox.quiver'.SHIFT_INVALID pyroms_toolbox.quiver%27-module.html#SHIFT_INVALID -pyroms_toolbox.quiver'.c_ pyroms_toolbox.quiver%27-module.html#c_ -pyroms_toolbox.quiver'.pi pyroms_toolbox.quiver%27-module.html#pi -pyroms_toolbox.quiver'.arcsin pyroms_toolbox.quiver%27-module.html#arcsin -pyroms_toolbox.quiver'.sctypeNA pyroms_toolbox.quiver%27-module.html#sctypeNA -pyroms_toolbox.quiver'.SHIFT_DIVIDEBYZERO pyroms_toolbox.quiver%27-module.html#SHIFT_DIVIDEBYZERO -pyroms_toolbox.quiver'.ERR_PRINT pyroms_toolbox.quiver%27-module.html#ERR_PRINT -pyroms_toolbox.quiver'.reciprocal pyroms_toolbox.quiver%27-module.html#reciprocal -pyroms_toolbox.quiver'.tanh pyroms_toolbox.quiver%27-module.html#tanh -pyroms_toolbox.quiver'.cast pyroms_toolbox.quiver%27-module.html#cast -pyroms_toolbox.quiver'.mgrid pyroms_toolbox.quiver%27-module.html#mgrid -pyroms_toolbox.quiver'.signbit pyroms_toolbox.quiver%27-module.html#signbit -pyroms_toolbox.quiver'.conj pyroms_toolbox.quiver%27-module.html#conj -pyroms_toolbox.quiver'.inf pyroms_toolbox.quiver%27-module.html#inf -pyroms_toolbox.quiver'.bitwise_xor pyroms_toolbox.quiver%27-module.html#bitwise_xor -pyroms_toolbox.quiver'.fabs pyroms_toolbox.quiver%27-module.html#fabs -pyroms_toolbox.quiver'.NaN pyroms_toolbox.quiver%27-module.html#NaN -pyroms_toolbox.quiver'.sqrt pyroms_toolbox.quiver%27-module.html#sqrt -pyroms_toolbox.quiver'.floor_divide pyroms_toolbox.quiver%27-module.html#floor_divide -pyroms_toolbox.quiver'.greater_equal pyroms_toolbox.quiver%27-module.html#greater_equal -pyroms_toolbox.quiver'.PINF pyroms_toolbox.quiver%27-module.html#PINF -pyroms_toolbox.quiver'.less pyroms_toolbox.quiver%27-module.html#less -pyroms_toolbox.quiver'.ERR_CALL pyroms_toolbox.quiver%27-module.html#ERR_CALL -pyroms_toolbox.quiver'.UFUNC_BUFSIZE_DEFAULT pyroms_toolbox.quiver%27-module.html#UFUNC_BUFSIZE_DEFAULT -pyroms_toolbox.quiver'.NAN pyroms_toolbox.quiver%27-module.html#NAN -pyroms_toolbox.quiver'.typeDict pyroms_toolbox.quiver%27-module.html#typeDict -pyroms_toolbox.quiver'.RAISE pyroms_toolbox.quiver%27-module.html#RAISE -pyroms_toolbox.quiver'.add pyroms_toolbox.quiver%27-module.html#add -pyroms_toolbox.quiver'.mod pyroms_toolbox.quiver%27-module.html#mod -pyroms_toolbox.quiver'.bitwise_not 
pyroms_toolbox.quiver%27-module.html#bitwise_not -pyroms_toolbox.quiver'.hypot pyroms_toolbox.quiver%27-module.html#hypot -pyroms_toolbox.quiver'.logical_and pyroms_toolbox.quiver%27-module.html#logical_and -pyroms_toolbox.quiver'.modf pyroms_toolbox.quiver%27-module.html#modf -pyroms_toolbox.quiver'.FPE_DIVIDEBYZERO pyroms_toolbox.quiver%27-module.html#FPE_DIVIDEBYZERO -pyroms_toolbox.quiver'.subtract pyroms_toolbox.quiver%27-module.html#subtract -pyroms_toolbox.quiver'.fmin pyroms_toolbox.quiver%27-module.html#fmin -pyroms_toolbox.quiver'.ones_like pyroms_toolbox.quiver%27-module.html#ones_like -pyroms_toolbox.quiver'.arcsinh pyroms_toolbox.quiver%27-module.html#arcsinh -pyroms_toolbox.quiver'.square pyroms_toolbox.quiver%27-module.html#square -pyroms_toolbox.quiver'.Infinity pyroms_toolbox.quiver%27-module.html#Infinity -pyroms_toolbox.quiver'.log pyroms_toolbox.quiver%27-module.html#log -pyroms_toolbox.quiver'.log10 pyroms_toolbox.quiver%27-module.html#log10 -pyroms_toolbox.quiver'.maximum pyroms_toolbox.quiver%27-module.html#maximum -pyroms_toolbox.quiver'.log1p pyroms_toolbox.quiver%27-module.html#log1p -pyroms_toolbox.quiver'.ScalarType pyroms_toolbox.quiver%27-module.html#ScalarType -pyroms_toolbox.quiver'.floor pyroms_toolbox.quiver%27-module.html#floor -pyroms_toolbox.quiver'.nan pyroms_toolbox.quiver%27-module.html#nan -pyroms_toolbox.quiver'.arctan pyroms_toolbox.quiver%27-module.html#arctan -pyroms_toolbox.quiver'.ERR_DEFAULT pyroms_toolbox.quiver%27-module.html#ERR_DEFAULT -pyroms_toolbox.quiver'.exp pyroms_toolbox.quiver%27-module.html#exp -pyroms_toolbox.quiver'.ALLOW_THREADS pyroms_toolbox.quiver%27-module.html#ALLOW_THREADS -pyroms_toolbox.quiver'.isinf pyroms_toolbox.quiver%27-module.html#isinf -pyroms_toolbox.quiver'.e pyroms_toolbox.quiver%27-module.html#e -pyroms_toolbox.quiver'.ERR_IGNORE pyroms_toolbox.quiver%27-module.html#ERR_IGNORE -pyroms_toolbox.remapping' pyroms_toolbox.remapping%27-module.html -pyroms_toolbox.remapping'.remapping pyroms_toolbox.remapping%27-module.html#remapping -pyroms_toolbox.rfactor' pyroms_toolbox.rfactor%27-module.html -pyroms_toolbox.rfactor'.rfactor pyroms_toolbox.rfactor%27-module.html#rfactor -pyroms_toolbox.rvalue' pyroms_toolbox.rvalue%27-module.html -pyroms_toolbox.rvalue'.rvalue pyroms_toolbox.rvalue%27-module.html#rvalue -pyroms_toolbox.seawater pyroms_toolbox.seawater-module.html -pyroms_toolbox.seawater.temppot0 pyroms_toolbox.seawater.heat-module.html#temppot0 -pyroms_toolbox.seawater.depth pyroms_toolbox.seawater.misc-module.html#depth -pyroms_toolbox.seawater.temppot pyroms_toolbox.seawater.heat-module.html#temppot -pyroms_toolbox.seawater.dens pyroms_toolbox.seawater.density-module.html#dens -pyroms_toolbox.seawater.salt pyroms_toolbox.seawater.salinity-module.html#salt -pyroms_toolbox.seawater.beta pyroms_toolbox.seawater.density-module.html#beta -pyroms_toolbox.seawater.soundvel pyroms_toolbox.seawater.misc-module.html#soundvel -pyroms_toolbox.seawater.heatcap pyroms_toolbox.seawater.heat-module.html#heatcap -pyroms_toolbox.seawater.adtgrad pyroms_toolbox.seawater.heat-module.html#adtgrad -pyroms_toolbox.seawater.cond pyroms_toolbox.seawater.salinity-module.html#cond -pyroms_toolbox.seawater.sigma pyroms_toolbox.seawater.density-module.html#sigma -pyroms_toolbox.seawater.drhods pyroms_toolbox.seawater.density-module.html#drhods -pyroms_toolbox.seawater.alpha pyroms_toolbox.seawater.density-module.html#alpha -pyroms_toolbox.seawater.svan pyroms_toolbox.seawater.density-module.html#svan -pyroms_toolbox.seawater.freezept 
pyroms_toolbox.seawater.misc-module.html#freezept -pyroms_toolbox.seawater.drhodt pyroms_toolbox.seawater.density-module.html#drhodt -pyroms_toolbox.seawater.density pyroms_toolbox.seawater.density-module.html -pyroms_toolbox.seawater.density._seck pyroms_toolbox.seawater.density-module.html#_seck -pyroms_toolbox.seawater.density.dens pyroms_toolbox.seawater.density-module.html#dens -pyroms_toolbox.seawater.density._dens0 pyroms_toolbox.seawater.density-module.html#_dens0 -pyroms_toolbox.seawater.density.svan pyroms_toolbox.seawater.density-module.html#svan -pyroms_toolbox.seawater.density.beta pyroms_toolbox.seawater.density-module.html#beta -pyroms_toolbox.seawater.density.drhods pyroms_toolbox.seawater.density-module.html#drhods -pyroms_toolbox.seawater.density.alpha pyroms_toolbox.seawater.density-module.html#alpha -pyroms_toolbox.seawater.density.sigma pyroms_toolbox.seawater.density-module.html#sigma -pyroms_toolbox.seawater.density.drhodt pyroms_toolbox.seawater.density-module.html#drhodt -pyroms_toolbox.seawater.heat pyroms_toolbox.seawater.heat-module.html -pyroms_toolbox.seawater.heat.heatcap pyroms_toolbox.seawater.heat-module.html#heatcap -pyroms_toolbox.seawater.heat.adtgrad pyroms_toolbox.seawater.heat-module.html#adtgrad -pyroms_toolbox.seawater.heat.temppot pyroms_toolbox.seawater.heat-module.html#temppot -pyroms_toolbox.seawater.heat.temppot0 pyroms_toolbox.seawater.heat-module.html#temppot0 -pyroms_toolbox.seawater.misc pyroms_toolbox.seawater.misc-module.html -pyroms_toolbox.seawater.misc.depth pyroms_toolbox.seawater.misc-module.html#depth -pyroms_toolbox.seawater.misc.freezept pyroms_toolbox.seawater.misc-module.html#freezept -pyroms_toolbox.seawater.misc.soundvel pyroms_toolbox.seawater.misc-module.html#soundvel -pyroms_toolbox.seawater.salinity pyroms_toolbox.seawater.salinity-module.html -pyroms_toolbox.seawater.salinity._sal pyroms_toolbox.seawater.salinity-module.html#_sal -pyroms_toolbox.seawater.salinity.salt pyroms_toolbox.seawater.salinity-module.html#salt -pyroms_toolbox.seawater.salinity._dsal pyroms_toolbox.seawater.salinity-module.html#_dsal -pyroms_toolbox.seawater.salinity.cond pyroms_toolbox.seawater.salinity-module.html#cond -pyroms_toolbox.seawater.salinity._rt pyroms_toolbox.seawater.salinity-module.html#_rt -pyroms_toolbox.seawater.salinity._c pyroms_toolbox.seawater.salinity-module.html#_c -pyroms_toolbox.seawater.salinity._b pyroms_toolbox.seawater.salinity-module.html#_b -pyroms_toolbox.seawater.salinity._a pyroms_toolbox.seawater.salinity-module.html#_a -pyroms_toolbox.seawater.test pyroms_toolbox.seawater.test-module.html -pyroms_toolbox.shapiro_filter pyroms_toolbox.shapiro_filter-module.html -pyroms_toolbox.shapiro_filter.shapiro2 pyroms_toolbox.shapiro_filter-module.html#shapiro2 -pyroms_toolbox.shapiro_filter.shapiro1 pyroms_toolbox.shapiro_filter-module.html#shapiro1 -pyroms_toolbox.sview' pyroms_toolbox.sview%27-module.html -pyroms_toolbox.sview'.sview pyroms_toolbox.sview%27-module.html#sview -pyroms_toolbox.transectview' pyroms_toolbox.transectview%27-module.html -pyroms_toolbox.transectview'.transectview pyroms_toolbox.transectview%27-module.html#transectview -pyroms_toolbox.twoDview' pyroms_toolbox.twoDview%27-module.html -pyroms_toolbox.twoDview'.twoDview pyroms_toolbox.twoDview%27-module.html#twoDview -pyroms_toolbox.zview' pyroms_toolbox.zview%27-module.html -pyroms_toolbox.zview'.zview pyroms_toolbox.zview%27-module.html#zview -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL 
pyroms_toolbox.BGrid_GFDL.BGrid_GFDL%27.BGrid_GFDL-class.html -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL._calculate_grid_angle pyroms_toolbox.BGrid_GFDL.BGrid_GFDL%27.BGrid_GFDL-class.html#_calculate_grid_angle -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL.__init__ pyroms_toolbox.BGrid_GFDL.BGrid_GFDL%27.BGrid_GFDL-class.html#__init__ -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL._calculate_t_vert pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html#_calculate_t_vert -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL._calculate_uv_vert pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html#_calculate_uv_vert -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL._calculate_grid_angle pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html#_calculate_grid_angle -pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL.__init__ pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html#__init__ -pyroms_toolbox.average'.avg_obj pyroms_toolbox.average%27.avg_obj-class.html -pyroms_toolbox.seawater.OutOfRangeError pyroms_toolbox.seawater.OutOfRangeError-class.html
diff --git a/pyroms_toolbox/docs/class-tree.html b/pyroms_toolbox/docs/class-tree.html deleted file mode 100644 index 2d9aeaf..0000000 --- a/pyroms_toolbox/docs/class-tree.html +++ /dev/null @@ -1,129 +0,0 @@
[129 lines of generated epydoc HTML removed: class hierarchy page]
diff --git a/pyroms_toolbox/docs/crarr.png b/pyroms_toolbox/docs/crarr.png deleted file mode 100644 index 26b43c5..0000000 Binary files a/pyroms_toolbox/docs/crarr.png and /dev/null differ
diff --git a/pyroms_toolbox/docs/epydoc.css b/pyroms_toolbox/docs/epydoc.css deleted file mode 100644 index 86d4170..0000000 --- a/pyroms_toolbox/docs/epydoc.css +++ /dev/null @@ -1,322 +0,0 @@
[322 lines of generated epydoc stylesheet removed]
diff --git a/pyroms_toolbox/docs/epydoc.js b/pyroms_toolbox/docs/epydoc.js deleted file mode 100644 index e787dbc..0000000 --- a/pyroms_toolbox/docs/epydoc.js +++ /dev/null @@ -1,293 +0,0 @@
[293 lines of generated epydoc JavaScript removed]
diff --git a/pyroms_toolbox/docs/frames.html b/pyroms_toolbox/docs/frames.html deleted file mode 100644 index 7e651fc..0000000 --- a/pyroms_toolbox/docs/frames.html +++ /dev/null @@ -1,17 +0,0 @@
[17 lines of generated epydoc frameset HTML removed]
diff --git a/pyroms_toolbox/docs/help.html b/pyroms_toolbox/docs/help.html deleted file mode 100644 index f621b39..0000000 --- a/pyroms_toolbox/docs/help.html +++ /dev/null @@ -1,268 +0,0 @@
[268 lines of generated epydoc help page removed]
diff --git a/pyroms_toolbox/docs/identifier-index.html b/pyroms_toolbox/docs/identifier-index.html deleted file mode 100644 index b386910..0000000 --- a/pyroms_toolbox/docs/identifier-index.html +++ /dev/null @@ -1,944 +0,0 @@
[944 lines of generated epydoc identifier index removed]
diff --git a/pyroms_toolbox/docs/index.html b/pyroms_toolbox/docs/index.html deleted file mode 100644 index 7e651fc..0000000 --- a/pyroms_toolbox/docs/index.html +++ /dev/null @@ -1,17 +0,0 @@
[17 lines of generated epydoc frameset HTML removed]
diff --git a/pyroms_toolbox/docs/module-tree.html b/pyroms_toolbox/docs/module-tree.html deleted file mode 100644 index 6ee36c1..0000000 --- a/pyroms_toolbox/docs/module-tree.html +++ /dev/null @@ -1,188 +0,0 @@
[188 lines of generated epydoc HTML removed: module hierarchy page]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox-module.html b/pyroms_toolbox/docs/pyroms_toolbox-module.html deleted file mode 100644 index 17ffb36..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox-module.html +++ /dev/null @@ -1,218 +0,0 @@
[218 lines of generated epydoc HTML removed: package page for pyroms_toolbox, version 0.1.0, author Frederic Castruccio (frederic@marine.rutgers.edu). It describes PYROMS_TOOLBOX as a toolbox for working with ROMS ocean model input/output files based on PYROMS, built on the python/numpy/matplotlib suite with NetCDF I/O through the netCDF4-python package, and lists the package submodules.]
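The package page summarized above notes that NetCDF I/O in pyroms_toolbox goes through the netCDF4-python package. For readers who have not used it, a minimal, generic read looks like the sketch below; the file name and variable name are hypothetical, not taken from this repository.

    # Hypothetical ROMS history file and variable; plain netCDF4-python usage.
    import netCDF4

    with netCDF4.Dataset('ocean_his.nc', 'r') as nc:
        temp = nc.variables['temp'][:]   # e.g. (ocean_time, s_rho, eta_rho, xi_rho)
        print(temp.shape)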
diff --git a/pyroms_toolbox/docs/pyroms_toolbox-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox-pysrc.html deleted file mode 100644 index 9a83147..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox-pysrc.html +++ /dev/null @@ -1,157 +0,0 @@
[157 lines of generated epydoc HTML removed: syntax-highlighted rendering of pyroms_toolbox/__init__.py, which imports the view utilities (iview, jview, lonview, latview, sview, zview, isoview, twoDview, transectview, quiver), seawater, N2, O2_saturation, the shapiro_filter functions, change, rfactor, rvalue, get_coast_line, plot_coast_line, lsq_phase_amplitude, remapping, nc_create_roms_file, nc_create_roms_bdry_file, average, plot_mask and BGrid_GFDL.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-module.html deleted file mode 100644 index b262bbd..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-module.html +++ /dev/null @@ -1,144 +0,0 @@
[144 lines of generated epydoc HTML removed: package page for pyroms_toolbox.BGrid_GFDL and its submodules]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-pysrc.html deleted file mode 100644 index 7a50dea..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL-pysrc.html +++ /dev/null @@ -1,132 +0,0 @@
[132 lines of generated epydoc HTML removed: syntax-highlighted rendering of BGrid_GFDL/__init__.py, which imports BGrid_GFDL, get_nc_BGrid_GFDL, make_remap_grid_file, get_coast_line, plot_coast_line, get_Bgrid_proj and flood.]
- - Package pyroms_toolbox :: - Package BGrid_GFDL - - - - - - -
[hide private]
[frames] | no frames]
-
-

Source Code for Package pyroms_toolbox.BGrid_GFDL

-
- 1  """ 
- 2  BGrid_GFDL module 
- 3  """ 
- 4   
- 5  from BGrid_GFDL import BGrid_GFDL 
- 6  from get_nc_BGrid_GFDL import get_nc_BGrid_GFDL 
- 7  from make_remap_grid_file import make_remap_grid_file 
- 8  from get_coast_line import get_coast_line 
- 9  from plot_coast_line import plot_coast_line 
-10  from get_Bgrid_proj import get_Bgrid_proj 
-11  from flood import flood 
-12   
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html
deleted file mode 100644
index ab856b4..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html
+++ /dev/null
@@ -1,131 +0,0 @@
-[epydoc page for the BGrid_GFDL module inside the BGrid_GFDL package, removed: lists the BGrid_GFDL class ("Arakawa B-Grid for GFDL CM2.1").]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-pysrc.html
deleted file mode 100644
index 72584df..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-pysrc.html
+++ /dev/null
@@ -1,168 +0,0 @@
-[epydoc source listing for the regional BGrid_GFDL class, removed: __init__(lon_t, lat_t, lon_uv, lat_uv, mask_t, mask_uv, h, z_t, z_t_edges, z_uv, z_uv_edges, f, name, xrange, yrange) subsets every field to the requested xrange/yrange window and builds the t- and uv-point vertex arrays; _calculate_grid_angle() derives the grid rotation angle from pyproj.Geod forward azimuths.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL-class.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL-class.html
deleted file mode 100644
index 6c1f4c7..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL-class.html
+++ /dev/null
@@ -1,280 +0,0 @@
-[epydoc class page for the regional BGrid_GFDL class ("Arakawa B-Grid for GFDL CM2.1"), removed: documents __init__ and _calculate_grid_angle plus the members inherited from object.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html
deleted file mode 100644
index 5a3e0c6..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html
+++ /dev/null
@@ -1,222 +0,0 @@
-[epydoc page for module BGrid_GFDL_full, removed: lists the full-domain BGrid_GFDL class ("Arakawa B-Grid for GFDL CM2.1"), get_nc_BGrid_GFDL(grdfile) ("Load B-Grid grid object for GFDL CM2.1 from netCDF grid file") and make_remap_BGrid_GFDL_file(Bgrd, Bpos='t').]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-pysrc.html
deleted file mode 100644
index cf36d51..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-pysrc.html
+++ /dev/null
@@ -1,347 +0,0 @@
-[epydoc source listing for BGrid_GFDL_full, removed: the full-domain BGrid_GFDL class builds t- and uv-cell vertex arrays and the grid angle; get_nc_BGrid_GFDL reads geolon/geolat, ht, coriolis_param, kmt/kmu and the st/sw depth axes from a CM2.1 grid file and builds 3D t- and uv-point masks from the level counts; make_remap_BGrid_GFDL_file writes a SCRIP-style remap_grid_<name>_<Bpos>.nc with cell centers, corners and mask.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html
deleted file mode 100644
index 0decc36..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL-class.html
+++ /dev/null
@@ -1,306 +0,0 @@
-[epydoc class page for the full-domain BGrid_GFDL class, removed: documents __init__, _calculate_t_vert, _calculate_uv_vert and _calculate_grid_angle.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-module.html
deleted file mode 100644
index ed4b478..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-module.html
+++ /dev/null
@@ -1,222 +0,0 @@
-[epydoc page for the flood module, removed: documents flood(varz, Bgrdz, Bpos='t', irange=None, jrange=None, spval=-1e10, dmax=0, cdepth=0, kk=0), which floods the 3D field varz over the land mask of the B-grid Bgrdz. Optional switches: Bpos selects t- or uv-points, irange/jrange subsample the grid, spval sets the missing value, dmax (if > 0) limits the horizontal flooding distance, and cdepth is a critical depth below which no flooding is done.]
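
Since that page is going away, here is a minimal usage sketch of flood() as it was documented there. The grid file name and the temp_z array are placeholders, and it assumes pyroms_toolbox and its BGrid_GFDL subpackage import cleanly:

    import numpy as np
    import pyroms_toolbox

    # Load the CM2.1 B-grid subset (placeholder file name).
    Bgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('CM2.1_grid_spec.nc')

    # Stand-in for a real temperature snapshot: 3D (depth, y, x) on the
    # t-points of the subset, with spval marking land/missing points.
    temp_z = np.where(Bgrd.mask_t == 1, 10.0, -1.e10)

    # Fill the masked points level by level so the field can be remapped.
    temp_flooded = pyroms_toolbox.BGrid_GFDL.flood(temp_z, Bgrd, Bpos='t',
                                                   spval=-1.e10, dmax=0, cdepth=0)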
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-pysrc.html
deleted file mode 100644
index 245e5c9..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.flood'-pysrc.html
+++ /dev/null
@@ -1,220 +0,0 @@
-[epydoc source listing for flood, removed: replaces spval by NaN, picks the t- or uv-point coordinates and mask, applies the optional irange/jrange/cdepth switches, fills each level horizontally with the Fortran _remapping.flood kernel, then extends the deepest valid value down to the bottom using pyroms.utility.get_bottom.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html
deleted file mode 100644
index 8ccc001..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html
+++ /dev/null
@@ -1,192 +0,0 @@
-[epydoc page for get_Bgrid_proj, removed: documents get_Bgrid_proj(Bgrd, type='merc', resolution='h'), which returns a Basemap object spanning the grid's t-point vertices for plotting; type sets the projection and resolution the coastline resolution.]
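
A quick sketch of how these plotting helpers fit together, again with a placeholder grid file name and assuming matplotlib and Basemap are installed:

    import matplotlib.pyplot as plt
    import pyroms_toolbox

    Bgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('CM2.1_grid_spec.nc')  # placeholder
    bmap = pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj(Bgrd)

    # Draw the Basemap coastline, then overlay the model's own land/sea edge.
    bmap.drawcoastlines()
    pyroms_toolbox.BGrid_GFDL.plot_coast_line(Bgrd, proj=bmap)
    plt.show()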
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-pysrc.html
deleted file mode 100644
index fe13e6f..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-pysrc.html
+++ /dev/null
@@ -1,138 +0,0 @@
-[epydoc source listing for get_Bgrid_proj, removed: builds a Basemap whose corners and lon_0/lat_0 come from the min/max of the grid's t-point vertices.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html
deleted file mode 100644
index dc7dba4..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html
+++ /dev/null
@@ -1,179 +0,0 @@
-[epydoc page for the B-grid get_coast_line, removed: documents get_coast_line(Bgrd), which returns the coastline segments of the grid object.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-pysrc.html
deleted file mode 100644
index 70c1a47..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_coast_line'-pysrc.html
+++ /dev/null
@@ -1,161 +0,0 @@
-[epydoc source listing for the B-grid get_coast_line, removed: walks the land points of mask_t and collects the lon/lat vertex pairs of every land-sea cell edge into a list of line segments.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html
deleted file mode 100644
index b1336d2..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html
+++ /dev/null
@@ -1,185 +0,0 @@
-[epydoc page for get_nc_BGrid_GFDL, removed: documents get_nc_BGrid_GFDL(grdfile, name='GFDL_CM2.1_North_Pacific', xrange=(60, 175), yrange=(120, 190)), which loads a B-Grid grid object for GFDL CM2.1 from a netCDF grid file.]
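
For the record, the usual first step documented on that page, sketched with a placeholder grid file name (the default xrange/yrange pick out the North Pacific window):

    import pyroms_toolbox

    # Load the North Pacific subset of the CM2.1 B-grid (placeholder file name).
    Bgrd = pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL('grid_spec.nc',
                                                       name='GFDL_CM2.1_North_Pacific',
                                                       xrange=(60, 175),
                                                       yrange=(120, 190))

    # Write the SCRIP-style remap grid file for the t-points; this produces
    # remap_grid_GFDL_CM2.1_North_Pacific_t.nc in the working directory.
    pyroms_toolbox.BGrid_GFDL.make_remap_grid_file(Bgrd, Bpos='t')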
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-pysrc.html
deleted file mode 100644
index 7071efd..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-pysrc.html
+++ /dev/null
@@ -1,177 +0,0 @@
-[epydoc source listing for get_nc_BGrid_GFDL, removed: reads geolon/geolat, ht, coriolis_param, kmt/kmu and the st/sw depth axes, builds 3D t- and uv-point masks from the level counts, and returns a regional BGrid_GFDL object for the given name/xrange/yrange.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html
deleted file mode 100644
index 077da4b..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html
+++ /dev/null
@@ -1,140 +0,0 @@
-[epydoc page for make_remap_grid_file, removed: documents make_remap_grid_file(Bgrd, Bpos='t').]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-pysrc.html
deleted file mode 100644
index d6118f7..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-pysrc.html
+++ /dev/null
@@ -1,202 +0,0 @@
-[epydoc source listing for make_remap_grid_file, removed: writes remap_grid_<name>_<Bpos>.nc (NETCDF3_CLASSIC) with grid_dims, grid_center_lon/lat, grid_imask and the four-corner grid_corner_lon/lat arrays for either the t- or uv-points.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html
deleted file mode 100644
index 8675fde..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html
+++ /dev/null
@@ -1,182 +0,0 @@
-[epydoc page for the B-grid plot_coast_line, removed: documents plot_coast_line(grd, proj=None), which plots the coastline of the grid object; proj is an optional Basemap projection.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-pysrc.html
deleted file mode 100644
index c6b3d0b..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.BGrid_GFDL.plot_coast_line'-pysrc.html
+++ /dev/null
@@ -1,150 +0,0 @@
-[epydoc source listing for the B-grid plot_coast_line, removed: gets the segments from get_coast_line, projects them if a Basemap is given, and adds them to the current axes as a black LineCollection.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.N2'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.N2'-module.html
deleted file mode 100644
index e5afb0e..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.N2'-module.html
+++ /dev/null
@@ -1,198 +0,0 @@
-[epydoc page for N2, removed: documents N2(rho, z, rho_0=1000.0), which returns the stratification frequency from density rho (kg/m^3) and depths z (m, positive upward); the vertical (first) dimension of the result is one shorter than the inputs.]
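
The calculation behind that page is just a finite difference of density over depth. A self-contained sketch (toy numbers, plain numpy) of the same N2 = -(g/rho_0) * drho/dz computation the listing performs:

    import numpy as np

    # Toy density column (kg/m^3) at depths z (m, positive upward).
    rho = np.array([1025.0, 1026.0, 1027.5])
    z = np.array([-10.0, -50.0, -100.0])

    rho_0 = 1000.0
    N2 = -(9.8 / rho_0) * np.diff(rho) / np.diff(z)   # one value per layer interface
    print(N2)   # ~[2.45e-04 2.94e-04], i.e. N^2 in s^-2

    # Equivalent to pyroms_toolbox.N2(rho, z) when the package is importable.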
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.N2'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.N2'-pysrc.html
deleted file mode 100644
index d075cf4..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.N2'-pysrc.html
+++ /dev/null
@@ -1,133 +0,0 @@
-[epydoc source listing for N2, removed: checks that rho and z have the same shape and returns -(9.8 / rho_0) * np.diff(rho, axis=0) / np.diff(z, axis=0).]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-module.html
deleted file mode 100644
index 47de142..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-module.html
+++ /dev/null
@@ -1,196 +0,0 @@
-[epydoc page for O2_saturation, removed: documents O2_saturation(T, S), which returns the oxygen saturation concentration (mmol O2 / m^3 at STP) for temperature T (deg C) and salinity S (PSU).]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-pysrc.html
deleted file mode 100644
index cfce17c..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.O2_saturation'-pysrc.html
+++ /dev/null
@@ -1,148 +0,0 @@
-[epydoc source listing for O2_saturation, removed: evaluates the Millero and Sohn (1992) oxygen solubility formula in T (converted to Kelvin) and S, then converts the result to mmol O2/m^3.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox._average-module.html b/pyroms_toolbox/docs/pyroms_toolbox._average-module.html
deleted file mode 100644
index efbb59e..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox._average-module.html
+++ /dev/null
@@ -1,149 +0,0 @@
-[epydoc page for _average, removed: the f2py-generated extension module providing the avg3d(dataset, incavg, counter, spval, ...) and avg2d(dataset, incavg, counter, spval, ...) incremental-average kernels used by pyroms_toolbox.average.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.average'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.average'-module.html
deleted file mode 100644
index 65f653b..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.average'-module.html
+++ /dev/null
@@ -1,270 +0,0 @@
-[epydoc page for average, removed: documents average(var, ncfiles, trange=None, avgfile=None, spval=1e+37, timevar='ocean_time'), which computes the temporal average of one or more variables using the f2py _average kernels. var is a variable name or a list of names; ncfiles is a single path, a Unix wildcard pattern, or a list of netCDF files; trange is an optional (start, end) index range on the time dimension; if avgfile is given the averages are written to that netCDF file, otherwise an avg_obj holding them is returned; timevar defaults to the ROMS 'ocean_time'.]
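
A minimal sketch of the call that page documents, with placeholder file paths; it assumes pyroms_toolbox (and its compiled _average module) is importable:

    import pyroms_toolbox

    # Average temp and salt over time records 10-40 of a set of ROMS history
    # files and write the result to ocean_avg_10_40.nc (placeholder names).
    pyroms_toolbox.average(['temp', 'salt'], '/path/to/ocean_his_*.nc',
                           trange=(10, 40), avgfile='ocean_avg_10_40.nc')

    # With avgfile left as None, an object carrying avg.temp etc. is returned instead.
    avg = pyroms_toolbox.average('temp', '/path/to/ocean_his_0001.nc')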
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.average'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.average'-pysrc.html
deleted file mode 100644
index fbcb0be..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.average'-pysrc.html
+++ /dev/null
@@ -1,290 +0,0 @@
-[epydoc source listing for average, removed: opens the files with pyroms.io.MFDataset, loops over the requested time records calling _average.avg3d/avg2d for 4D/3D variables, masks spval, and either writes a NETCDF3_CLASSIC average file (copying dimensions, units and field attributes) or returns the avg object.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.average'.avg_obj-class.html b/pyroms_toolbox/docs/pyroms_toolbox.average'.avg_obj-class.html
deleted file mode 100644
index 0d7cab3..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.average'.avg_obj-class.html
+++ /dev/null
@@ -1,171 +0,0 @@
-[epydoc class page for avg_obj, removed: an empty container class whose attributes are filled with the computed averages.]
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.change'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.change'-module.html
deleted file mode 100644
index 865dd40..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.change'-module.html
+++ /dev/null
@@ -1,142 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module change']

    Module change'

    Functions
        change(old, relation, flag, value)
            Change values in a matrix
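A minimal usage sketch (not part of the original docs; the input array is a made-up example, and the function is assumed to be re-exported at the pyroms_toolbox package level like the other toolbox modules):

    import numpy as np
    import pyroms_toolbox

    # hypothetical 2D field
    h = np.array([[10., -5., 3.],
                  [ 2.,  7., -1.]])

    # replace every value below zero with zero
    h_clipped = pyroms_toolbox.change(h, '<', 0., 0.)

    # replace NaN values with a fill value
    h_nan = np.where(h < 0., np.nan, h)
    h_filled = pyroms_toolbox.change(h_nan, '==', np.nan, 1.e37)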
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.change'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.change'-pysrc.html
deleted file mode 100644
index 2dc63e6..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.change'-pysrc.html
+++ /dev/null
@@ -1,148 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.change']

    import numpy as np

    def change(old, relation, flag, value):
        """
        Change values in a matrix
        """

        if relation not in ('==', '!=', '>', '<', '>=', '<='):
            raise ValueError('Relation {%s} not valid' % relation)

        if np.isnan(flag):
            # comparisons against NaN only make sense as equality tests
            if relation == '==':
                replace = np.where(np.isnan(old))
            elif relation == '!=':
                replace = np.where(~np.isnan(old))
            else:
                raise ValueError('Relation should be == or != to compare to NaN')
        else:
            if relation == '==':
                replace = np.where(old == flag)
            elif relation == '>':
                replace = np.where(old > flag)
            elif relation == '<':
                replace = np.where(old < flag)
            elif relation == '!=':
                replace = np.where(old != flag)
            elif relation == '>=':
                replace = np.where(old >= flag)
            elif relation == '<=':
                replace = np.where(old <= flag)

        new = old.copy()
        new[replace] = value

        return new
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-module.html
deleted file mode 100644
index ee5298e..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-module.html
+++ /dev/null
@@ -1,178 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module get_coast_line']

    Module get_coast_line'

    Functions
        get_coast_line(grd)
            coast = get_coast_line(grd)

    Function Details
        get_coast_line(grd)

            coast = get_coast_line(grd)

            Return the coastline from the grid object grd.
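A minimal usage sketch (not part of the original docs; 'MY_GRID' is a placeholder gridid assumed to be defined in the file pointed to by PYROMS_GRIDID_FILE):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')
    coast = pyroms_toolbox.get_coast_line(grd)
    print(len(coast), 'coastline segments')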
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-pysrc.html
deleted file mode 100644
index 82e08ad..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.get_coast_line'-pysrc.html
+++ /dev/null
@@ -1,159 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.get_coast_line']

    import numpy as np

    def get_coast_line(grd):
        '''
        coast = get_coast_line(grd)

        return the coastline from the grid object grd
        '''

        # get land points
        jidx, iidx = np.where(grd.hgrid.mask_rho == 0)

        lon = grd.hgrid.lon_vert
        lat = grd.hgrid.lat_vert
        mask = grd.hgrid.mask_rho

        coast = []

        for i in range(iidx.shape[0]):
            # edge shared with the (j+1, i) neighbour
            if jidx[i] != mask.shape[0]-1:
                if mask[jidx[i], iidx[i]] != mask[jidx[i]+1, iidx[i]]:
                    lonc = [lon[jidx[i]+1, iidx[i]], lon[jidx[i]+1, iidx[i]+1]]
                    latc = [lat[jidx[i]+1, iidx[i]], lat[jidx[i]+1, iidx[i]+1]]
                    coast.append(list(zip(lonc, latc)))

            # edge shared with the (j-1, i) neighbour
            if jidx[i] != 0:
                if mask[jidx[i], iidx[i]] != mask[jidx[i]-1, iidx[i]]:
                    lonc = [lon[jidx[i], iidx[i]], lon[jidx[i], iidx[i]+1]]
                    latc = [lat[jidx[i], iidx[i]], lat[jidx[i], iidx[i]+1]]
                    coast.append(list(zip(lonc, latc)))

            # edge shared with the (j, i+1) neighbour
            if iidx[i] != mask.shape[1]-1:
                if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]+1]:
                    lonc = [lon[jidx[i], iidx[i]+1], lon[jidx[i]+1, iidx[i]+1]]
                    latc = [lat[jidx[i], iidx[i]+1], lat[jidx[i]+1, iidx[i]+1]]
                    coast.append(list(zip(lonc, latc)))

            # edge shared with the (j, i-1) neighbour
            if iidx[i] != 0:
                if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]-1]:
                    lonc = [lon[jidx[i], iidx[i]], lon[jidx[i]+1, iidx[i]]]
                    latc = [lat[jidx[i], iidx[i]], lat[jidx[i]+1, iidx[i]]]
                    coast.append(list(zip(lonc, latc)))

        return coast
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.isoview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.isoview'-module.html
deleted file mode 100644
index 75ae00b..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.isoview'-module.html
+++ /dev/null
@@ -1,269 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module isoview']

    Module isoview'

    Functions
        isoview(var, prop, tindex, isoval, grid, filename=None, cmin=None,
                cmax=None, clev=None, fill=False, contour=False, d=4,
                range=None, fts=None, title=None, clb=True, pal=None,
                proj='merc', fill_land=False, outfile=None)
            map = isoview(var, prop, tindex, isoval, grid, {optional switch})

    Function Details
        isoview(var, prop, tindex, isoval, grid, {optional switch})

        optional switch:
          - filename   if defined, load the variable from file
          - cmin       set color minimum limit
          - cmax       set color maximum limit
          - clev       set the number of color steps
          - fill       use contourf instead of pcolor
          - contour    overlay contours (requires fill=True)
          - d          contour density (default d=4)
          - range      set axis limits
          - fts        set font size (default: 12)
          - title      add title to the plot
          - clb        add colorbar (default: True)
          - pal        set color map (default: cm.jet)
          - proj       set projection type (default: merc)
          - fill_land  fill land-masked areas with gray (default: False)
          - outfile    if defined, write figure to file

        Plot a projection of variable var where property prop == isoval.
        If filename is provided, var and prop must be strings and the
        variables will be loaded from the file. grid can be a grid object
        or a gridid; in the latter case, the grid object corresponding to
        the provided gridid will be loaded. If proj is not None, a Basemap
        object is returned, to be used with quiver for example.
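A minimal usage sketch (not part of the original docs; the gridid 'MY_GRID' and the file avg.nc are placeholders):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    # plot salinity on the 10 degC isotherm at the first time record,
    # reading both variables from the file
    m = pyroms_toolbox.isoview('salt', 'temp', 0, 10.0, grd,
                               filename='avg.nc', fill=True, clev=30,
                               title='Salinity on the 10C isotherm',
                               outfile='salt_iso10.png')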
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.isoview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.isoview'-pysrc.html
deleted file mode 100644
index 65b9242..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.isoview'-pysrc.html
+++ /dev/null
@@ -1,332 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.isoview'; listing follows]
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6  import pyroms_toolbox 
-  7   
-  8   
-
9 -def isoview(var, prop, tindex, isoval, grid, filename=None, \ - 10 cmin=None, cmax=None, clev=None, fill=False, \ - 11 contour=False, d=4, range=None, fts=None, \ - 12 title=None, clb=True, pal=None, proj='merc', \ - 13 fill_land=False, outfile=None): -
14 """ - 15 map = isoview(var, prop, tindex, isoval, grid, {optional switch}) - 16 - 17 optional switch: - 18 - filename if defined, load the variable from file - 19 - cmin set color minimum limit - 20 - cmax set color maximum limit - 21 - clev set the number of color step - 22 - fill use contourf instead of pcolor - 23 - contour overlay contour (request fill=True) - 24 - d contour density (default d=4) - 25 - range set axis limit - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - clb add colorbar (defaul: True) - 29 - pal set color map (default: cm.jet) - 30 - proj set projection type (default: merc) - 31 - fill_land fill land masked area with gray (defaul: True) - 32 - outfile if defined, write figure to file - 33 - 34 plot a projection of variable at property == isoval. If filename - 35 is provided, var and prop must be a strings and the variables will - 36 be load from the file. - 37 grid can be a grid object or a gridid. In the later case, the grid - 38 object correponding to the provided gridid will be loaded. - 39 If proj is not None, return a Basemap object to be used with quiver - 40 for example. - 41 """ - 42 - 43 # get grid - 44 if type(grid).__name__ == 'ROMS_Grid': - 45 grd = grid - 46 else: - 47 grd = pyroms.grid.get_ROMS_grid(grid) - 48 - 49 - 50 # get variable - 51 if filename == None: - 52 var = var - 53 prop = prop - 54 else: - 55 data = pyroms.io.Dataset(filename) - 56 var = data.variables[var] - 57 prop = data.variables[prop] - 58 - 59 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 60 - 61 if tindex is not -1: - 62 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 63 K, N, M, L = var.shape - 64 else: - 65 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 66 N, M, L = var.shape - 67 - 68 # determine where on the C-grid these variable lies - 69 if N == Np and M == Mp and L == Lp: - 70 Cpos='rho' - 71 mask = grd.hgrid.mask_rho - 72 - 73 if N == Np and M == Mp and L == Lp-1: - 74 Cpos='u' - 75 mask = grd.hgrid.mask_u - 76 - 77 if N == Np and M == Mp-1 and L == Lp: - 78 Cpos='v' - 79 mask = grd.hgrid.mask_v - 80 - 81 # get constante-iso slice - 82 if tindex == -1: - 83 var = var[:,:,:] - 84 prop = prop[:,:,:] - 85 else: - 86 var = var[tindex,:,:,:] - 87 prop = prop[tindex,:,:,:] - 88 - 89 if fill == True: - 90 isoslice, lon, lat = pyroms.tools.isoslice(var, prop, isoval, \ - 91 grd, Cpos=Cpos) - 92 else: - 93 isoslice, lon, lat = pyroms.tools.isoslice(var, prop, isoval, \ - 94 grd, Cpos=Cpos, vert=True) - 95 - 96 # plot - 97 if cmin is None: - 98 cmin = isoslice.min() - 99 else: -100 cmin = float(cmin) -101 -102 if cmax is None: -103 cmax = isoslice.max() -104 else: -105 cmax = float(cmax) -106 -107 if clev is None: -108 clev = 100. -109 else: -110 clev = float(clev) -111 -112 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -113 -114 if pal is None: -115 pal = cm.jet -116 else: -117 pal = pal -118 -119 if fts is None: -120 fts = 12 -121 else: -122 fts = fts -123 -124 #pal.set_over('w', 1.0) -125 #pal.set_under('w', 1.0) -126 #pal.set_bad('w', 1.0) -127 -128 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -129 -130 if range is None: -131 lon_min = lon.min() -132 lon_max = lon.max() -133 lon_0 = (lon_min + lon_max) / 2. -134 lat_min = lat.min() -135 lat_max = lat.max() -136 lat_0 = (lat_min + lat_max) / 2. -137 else: -138 lon_min = range[0] -139 lon_max = range[1] -140 lon_0 = (lon_min + lon_max) / 2. -141 lat_min = range[2] -142 lat_max = range[3] -143 lat_0 = (lat_min + lat_max) / 2. 
-144 -145 # clear figure -146 #plt.clf() -147 -148 if proj is not None: -149 map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \ -150 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -151 resolution='h', area_thresh=5.) -152 x, y = map(lon,lat) -153 -154 if fill_land is True and proj is not None: -155 # fill land and draw coastlines -156 map.drawcoastlines() -157 map.fillcontinents(color='grey') -158 else: -159 if proj is not None: -160 Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) -161 pyroms_toolbox.plot_coast_line(grd, map) -162 else: -163 plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) -164 pyroms_toolbox.plot_coast_line(grd) -165 -166 if fill is True: -167 if proj is not None: -168 cf = Basemap.contourf(map, x, y, isoslice, vc, cmap = pal, \ -169 norm = pal_norm) -170 else: -171 cf = plt.contourf(lon, lat, isoslice, vc, cmap = pal, \ -172 norm = pal_norm) -173 else: -174 if proj is not None: -175 cf = Basemap.pcolor(map, x, y, isoslice, cmap = pal, norm = pal_norm) -176 else: -177 cf = plt.pcolor(lon, lat, isoslice, cmap = pal, norm = pal_norm) -178 -179 if clb is True: -180 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -181 for t in clb.ax.get_yticklabels(): -182 t.set_fontsize(fts) -183 -184 if contour is True: -185 if fill is not True: -186 raise Warning, 'Please run again with fill=True to overlay contour.' -187 else: -188 if proj is not None: -189 Basemap.contour(map, x, y, isoslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -190 else: -191 plt.contour(lon, lat, isoslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -192 -193 if proj is None and range is not None: -194 plt.axis(range) -195 -196 -197 if title is not None: -198 plt.title(title, fontsize=fts+4) -199 -200 if proj is not None: -201 map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.), \ -202 labels=[0,0,0,1], fmt='%.1f') -203 map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.), \ -204 labels=[1,0,0,0], fmt='%.1f') -205 -206 if outfile is not None: -207 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ -208 outfile.find('.eps') != -1: -209 print 'Write figure to file', outfile -210 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ -211 orientation='portrait') -212 else: -213 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -214 -215 -216 if proj is None: -217 return -218 else: -219 return map -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.iview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.iview'-module.html
deleted file mode 100644
index 9aea734..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.iview'-module.html
+++ /dev/null
@@ -1,267 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module iview']

    Module iview'

    Functions
        iview(var, tindex, iindex, gridid, filename=None, cmin=None,
              cmax=None, clev=None, clbformat='%.2f', fill=False,
              contour=False, d=4, jrange=None, hrange=None, fts=None,
              title=None, map=False, pal=None, clb=True, outfile=None)
            iview(var, tindex, iindex, gridid, {optional switch})

    Function Details
        iview(var, tindex, iindex, gridid, {optional switch})

        optional switch:
          - filename   if defined, load the variable from file
          - cmin       set color minimum limit
          - cmax       set color maximum limit
          - clev       set the number of color steps
          - fill       use contourf instead of pcolor
          - contour    overlay contours (requires fill=True)
          - d          contour density (default d=4)
          - jrange     j range
          - hrange     h range
          - fts        set font size (default: 12)
          - title      add title to the plot
          - map        if True, draw a map showing the slice location
          - pal        set color map (default: cm.jet)
          - clb        add colorbar (default: True)
          - outfile    if defined, write figure to file

        Plot a constant-i slice of variable var. If filename is provided,
        var must be a string and the variable will be loaded from the
        file. gridid can be a grid object or a gridid; in the latter case,
        the grid object corresponding to the provided gridid will be
        loaded.
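A minimal usage sketch (not part of the original docs; the gridid 'MY_GRID', the file his.nc and the index i=50 are placeholders):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    # vertical section of temperature along i=50 at the first time record,
    # with a small locator map
    pyroms_toolbox.iview('temp', 0, 50, grd, filename='his.nc',
                         fill=True, map=True, title='Temperature, i=50',
                         outfile='temp_i50.png')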
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.iview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.iview'-pysrc.html
deleted file mode 100644
index ff2c76b..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.iview'-pysrc.html
+++ /dev/null
@@ -1,335 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.iview'; listing follows]
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6   
-  7   
-
8 -def iview(var, tindex, iindex, gridid, filename=None, \ - 9 cmin=None, cmax=None, clev=None, clbformat='%.2f', \ - 10 fill=False, contour=False, d=4, jrange=None, \ - 11 hrange=None, fts=None, title=None, map=False, \ - 12 pal=None, clb=True, outfile=None): -
13 """ - 14 iview(var, tindex, iindex, gridid, {optional switch}) - 15 - 16 optional switch: - 17 - filename if defined, load the variable from file - 18 - cmin set color minimum limit - 19 - cmax set color maximum limit - 20 - clev set the number of color step - 21 - fill use contourf instead of pcolor - 22 - contour overlay contour (request fill=True) - 23 - d contour density (default d=4) - 24 - jrange j range - 25 - hrange h range - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - map if True, draw a map showing islice location - 29 - pal set color map (default: cm.jet) - 30 - clb add colorbar (defaul: True) - 31 - outfile if defined, write figure to file - 32 - 33 plot a constante-i slice of variable var. If filename is provided, - 34 var must be a string and the variable will be load from the file. - 35 grid can be a grid object or a gridid. In the later case, the grid - 36 object correponding to the provided gridid will be loaded. - 37 """ - 38 - 39 # get grid - 40 if type(gridid).__name__ == 'ROMS_Grid': - 41 grd = gridid - 42 else: - 43 grd = pyroms.grid.get_ROMS_grid(gridid) - 44 - 45 # get variable - 46 if filename == None: - 47 var = var - 48 else: - 49 data = pyroms.io.Dataset(filename) - 50 - 51 var = data.variables[var] - 52 - 53 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 54 - 55 if tindex is not -1: - 56 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 57 K, N, M, L = var.shape - 58 else: - 59 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 60 N, M, L = var.shape - 61 - 62 # determine where on the C-grid these variable lies - 63 if N == Np and M == Mp and L == Lp: - 64 Cpos='rho' - 65 lon = grd.hgrid.lon_vert - 66 lat = grd.hgrid.lat_vert - 67 mask = grd.hgrid.mask_rho - 68 - 69 if N == Np and M == Mp and L == Lp-1: - 70 Cpos='u' - 71 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 72 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 73 mask = grd.hgrid.mask_u - 74 - 75 if N == Np and M == Mp-1 and L == Lp: - 76 Cpos='v' - 77 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 78 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 79 mask = grd.hgrid.mask_v - 80 - 81 if N == Np+1 and M == Mp and L == Lp: - 82 Cpos='w' - 83 lon = grd.hgrid.lon_vert - 84 lat = grd.hgrid.lat_vert - 85 mask = grd.hgrid.mask_rho - 86 - 87 # get constante-j slice - 88 if tindex == -1: - 89 var = var[:,:,:] - 90 else: - 91 var = var[tindex,:,:,:] - 92 - 93 if fill == True: - 94 islice, zi, loni, lati, = pyroms.tools.islice(var, iindex, grd, \ - 95 Cpos) - 96 else: - 97 islice, zi, loni, lati, = pyroms.tools.islice(var, iindex, grd, \ - 98 Cpos, vert=True) - 99 -100 -101 # plot -102 if cmin is None: -103 cmin = islice.min() -104 else: -105 cmin = float(cmin) -106 -107 if cmax is None: -108 cmax = islice.max() -109 else: -110 cmax = float(cmax) -111 -112 if clev is None: -113 clev = 100. 
-114 else: -115 clev = float(clev) -116 -117 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -118 -119 if pal is None: -120 pal = cm.jet -121 else: -122 pal = pal -123 -124 if fts is None: -125 fts = 12 -126 else: -127 fts = fts -128 -129 #pal.set_over('w', 1.0) -130 #pal.set_under('w', 1.0) -131 #pal.set_bad('w', 1.0) -132 -133 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -134 -135 # clear figure -136 #plt.clf() -137 -138 if map is True: -139 # set axes for the main plot in order to keep space for the map -140 if fts < 12: -141 ax=None -142 else: -143 ax = plt.axes([0.15, 0.08, 0.8, 0.65]) -144 else: -145 if fts < 12: -146 ax=None -147 else: -148 ax=plt.axes([0.15, 0.1, 0.8, 0.8]) -149 -150 -151 if fill is True: -152 cf = plt.contourf(lati, zi, islice, vc, cmap = pal, norm = pal_norm, axes=ax) -153 else: -154 cf = plt.pcolor(lati, zi, islice, cmap = pal, norm = pal_norm, axes=ax) -155 -156 if clb is True: -157 clb = plt.colorbar(cf, fraction=0.075,format=clbformat) -158 for t in clb.ax.get_yticklabels(): -159 t.set_fontsize(fts) -160 -161 if contour is True: -162 if fill is not True: -163 raise Warning, 'Please run again with fill=True for overlay contour.' -164 else: -165 plt.contour(lati, zi, islice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) -166 -167 -168 if jrange is not None: -169 plt.xlim(jrange) -170 -171 if hrange is not None: -172 plt.ylim(hrange) -173 -174 if title is not None: -175 if map is True: -176 # move the title on the right -177 xmin, xmax = ax.get_xlim() -178 ymin, ymax = ax.get_ylim() -179 xt = xmin - (xmax-xmin)/9. -180 yt = ymax + (ymax-ymin)/7. -181 plt.text(xt, yt, title, fontsize=fts+4) -182 else: -183 plt.title(title, fontsize=fts+4) -184 -185 plt.xlabel('Latitude', fontsize=fts) -186 plt.ylabel('Depth', fontsize=fts) -187 -188 if map is True: -189 # draw a map with constant-i slice location -190 ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) -191 varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) -192 xmin, xmax = ax.get_xlim() -193 dd = (lat[:,iindex] - xmin) * (lat[:,iindex] - xmin) -194 start = np.where(dd == dd.min()) -195 dd = (lat[:,iindex] - xmax) * (lat[:,iindex] - xmax) -196 end = np.where(dd == dd.min()) -197 lon_min = lon.min() -198 lon_max = lon.max() -199 lon_0 = (lon_min + lon_max) / 2. -200 lat_min = lat.min() -201 lat_max = lat.max() -202 lat_0 = (lat_min + lat_max) / 2. -203 map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ -204 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -205 resolution='i', area_thresh=10.) -206 x, y = map(lon,lat) -207 # fill land and draw coastlines -208 map.drawcoastlines() -209 map.fillcontinents(color='grey') -210 #map.drawmapboundary() -211 Basemap.pcolor(map, x, y, varm, axes=ax_map) -212 Basemap.plot(map, x[start[0]:end[0],iindex], y[start[0]:end[0],iindex], \ -213 'k-', linewidth=3, axes=ax_map) -214 -215 -216 if outfile is not None: -217 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: -218 print 'Write figure to file', outfile -219 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') -220 else: -221 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -222 -223 -224 return -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.jview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.jview'-module.html
deleted file mode 100644
index 4c78324..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.jview'-module.html
+++ /dev/null
@@ -1,267 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module jview']

    Module jview'

    Functions
        jview(var, tindex, jindex, gridid, filename=None, cmin=None,
              cmax=None, clev=None, clbformat='%.2f', fill=False,
              contour=False, d=4, irange=None, hrange=None, fts=None,
              title=None, map=False, pal=None, clb=True, outfile=None)
            jview(var, tindex, jindex, gridid, {optional switch})

    Function Details
        jview(var, tindex, jindex, gridid, {optional switch})

        optional switch:
          - filename   if defined, load the variable from file
          - cmin       set color minimum limit
          - cmax       set color maximum limit
          - clev       set the number of color steps
          - fill       use contourf instead of pcolor
          - contour    overlay contours (requires fill=True)
          - d          contour density (default d=4)
          - irange     i range
          - hrange     h range
          - fts        set font size (default: 12)
          - title      add title to the plot
          - map        if True, draw a map showing the slice location
          - pal        set color map (default: cm.jet)
          - clb        add colorbar (default: True)
          - outfile    if defined, write figure to file

        Plot a constant-j slice of variable var. If filename is provided,
        var must be a string and the variable will be loaded from the
        file. gridid can be a grid object or a gridid; in the latter case,
        the grid object corresponding to the provided gridid will be
        loaded.
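A minimal usage sketch (not part of the original docs; the gridid 'MY_GRID', the file his.nc and the index j=100 are placeholders):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    # vertical section of salinity along j=100 at the first time record,
    # with contour lines overlaid on the filled field
    pyroms_toolbox.jview('salt', 0, 100, grd, filename='his.nc',
                         fill=True, contour=True, title='Salinity, j=100')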
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.jview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.jview'-pysrc.html
deleted file mode 100644
index 5534985..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.jview'-pysrc.html
+++ /dev/null
@@ -1,334 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.jview'; listing follows]
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6   
-  7   
-
8 -def jview(var, tindex, jindex, gridid, filename=None, \ - 9 cmin=None, cmax=None, clev=None, clbformat='%.2f', \ - 10 fill=False, contour=False, d=4, irange=None, \ - 11 hrange=None, fts=None, title=None, map=False, \ - 12 pal=None, clb=True, outfile=None): -
13 """ - 14 jview(var, tindex, jindex, gridid, {optional switch}) - 15 - 16 optional switch: - 17 - filename if defined, load the variable from file - 18 - cmin set color minimum limit - 19 - cmax set color maximum limit - 20 - clev set the number of color step - 21 - fill use contourf instead of pcolor - 22 - contour overlay contour (request fill=True) - 23 - d contour density (default d=4) - 24 - irange i range - 25 - hrange h range - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - map if True, draw a map showing islice location - 29 - pal set color map (default: cm.jet) - 30 - clb add colorbar (defaul: True) - 31 - outfile if defined, write figure to file - 32 - 33 plot a constante-j slice of variable var. If filename is provided, - 34 var must be a string and the variable will be load from the file. - 35 grid can be a grid object or a gridid. In the later case, the grid - 36 object correponding to the provided gridid will be loaded. - 37 """ - 38 - 39 # get grid - 40 if type(gridid).__name__ == 'ROMS_Grid': - 41 grd = gridid - 42 else: - 43 grd = pyroms.grid.get_ROMS_grid(gridid) - 44 - 45 # get variable - 46 if filename == None: - 47 var = var - 48 else: - 49 data = pyroms.io.Dataset(filename) - 50 - 51 var = data.variables[var] - 52 - 53 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 54 - 55 if tindex is not -1: - 56 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 57 K, N, M, L = var.shape - 58 else: - 59 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 60 N, M, L = var.shape - 61 - 62 # determine where on the C-grid these variable lies - 63 if N == Np and M == Mp and L == Lp: - 64 Cpos='rho' - 65 lon = grd.hgrid.lon_vert - 66 lat = grd.hgrid.lat_vert - 67 mask = grd.hgrid.mask_rho - 68 - 69 if N == Np and M == Mp and L == Lp-1: - 70 Cpos='u' - 71 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 72 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 73 mask = grd.hgrid.mask_u - 74 - 75 if N == Np and M == Mp-1 and L == Lp: - 76 Cpos='v' - 77 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 78 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 79 mask = grd.hgrid.mask_v - 80 - 81 if N == Np+1 and M == Mp and L == Lp: - 82 Cpos='w' - 83 lon = grd.hgrid.lon_vert - 84 lat = grd.hgrid.lat_vert - 85 mask = grd.hgrid.mask_rho - 86 - 87 # get constante-j slice - 88 if tindex == -1: - 89 var = var[:,:,:] - 90 else: - 91 var = var[tindex,:,:,:] - 92 - 93 if fill == True: - 94 jslice, zj, lonj, latj, = pyroms.tools.jslice(var, jindex, grd, \ - 95 Cpos) - 96 else: - 97 jslice, zj, lonj, latj, = pyroms.tools.jslice(var, jindex, grd, \ - 98 Cpos, vert=True) - 99 -100 # plot -101 if cmin is None: -102 cmin = jslice.min() -103 else: -104 cmin = float(cmin) -105 -106 if cmax is None: -107 cmax = jslice.max() -108 else: -109 cmax = float(cmax) -110 -111 if clev is None: -112 clev = 100. 
-113 else: -114 clev = float(clev) -115 -116 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -117 -118 if pal is None: -119 pal = cm.jet -120 else: -121 pal = pal -122 -123 if fts is None: -124 fts = 12 -125 else: -126 fts = fts -127 -128 #pal.set_over('w', 1.0) -129 #pal.set_under('w', 1.0) -130 #pal.set_bad('w', 1.0) -131 -132 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -133 -134 # clear figure -135 #plt.clf() -136 -137 if map is True: -138 # set axes for the main plot in order to keep space for the map -139 if fts < 12: -140 ax=None -141 else: -142 ax = plt.axes([0.15, 0.08, 0.8, 0.65]) -143 else: -144 if fts < 12: -145 ax=None -146 else: -147 ax=plt.axes([0.15, 0.1, 0.8, 0.8]) -148 -149 -150 if fill is True: -151 cf = plt.contourf(lonj, zj, jslice, vc, cmap = pal, norm = pal_norm, axes=ax) -152 else: -153 cf = plt.pcolor(lonj, zj, jslice, cmap = pal, norm = pal_norm, axes=ax) -154 -155 if clb is True: -156 clb = plt.colorbar(cf, fraction=0.075,format=clbformat) -157 for t in clb.ax.get_yticklabels(): -158 t.set_fontsize(fts) -159 -160 if contour is True: -161 if fill is not True: -162 raise Warning, 'Please run again with fill=True for overlay contour.' -163 else: -164 plt.contour(lonj, zj, jslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) -165 -166 -167 if irange is not None: -168 plt.xlim(irange) -169 -170 if hrange is not None: -171 plt.ylim(hrange) -172 -173 if title is not None: -174 if map is True: -175 # move the title on the right -176 xmin, xmax = ax.get_xlim() -177 ymin, ymax = ax.get_ylim() -178 xt = xmin - (xmax-xmin)/9. -179 yt = ymax + (ymax-ymin)/7. -180 plt.text(xt, yt, title, fontsize=fts+4) -181 else: -182 plt.title(title, fontsize=fts+4) -183 -184 plt.xlabel('Latitude', fontsize=fts) -185 plt.ylabel('Depth', fontsize=fts) -186 -187 if map is True: -188 # draw a map with constant-i slice location -189 ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) -190 varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) -191 xmin, xmax = ax.get_xlim() -192 dd = (lon[jindex,:] - xmin) * (lon[jindex,:] - xmin) -193 start = np.where(dd == dd.min()) -194 dd = (lon[jindex,:] - xmax) * (lon[jindex,:] - xmax) -195 end = np.where(dd == dd.min()) -196 lon_min = lon.min() -197 lon_max = lon.max() -198 lon_0 = (lon_min + lon_max) / 2. -199 lat_min = lat.min() -200 lat_max = lat.max() -201 lat_0 = (lat_min + lat_max) / 2. -202 map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ -203 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -204 resolution='i', area_thresh=10.) -205 x, y = map(lon,lat) -206 # fill land and draw coastlines -207 map.drawcoastlines() -208 map.fillcontinents(color='grey') -209 #map.drawmapboundary() -210 Basemap.pcolor(map, x, y, varm, axes=ax_map) -211 Basemap.plot(map, x[jindex,start[0]:end[0]], y[jindex,start[0]:end[0]], \ -212 'k-', linewidth=3, axes=ax_map) -213 -214 -215 if outfile is not None: -216 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: -217 print 'Write figure to file', outfile -218 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') -219 else: -220 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -221 -222 -223 return -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.latview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.latview'-module.html
deleted file mode 100644
index 379ba3d..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.latview'-module.html
+++ /dev/null
@@ -1,265 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module latview']

    Module latview'

    Functions
        latview(var, tindex, latitude, gridid, filename=None, cmin=None,
                cmax=None, clev=None, fill=False, contour=False, d=4,
                lonrange=None, hrange=None, fts=None, title=None,
                map=False, pal=None, clb=True, outfile=None)
            latview(var, tindex, latitude, gridid, {optional switch})

    Function Details
        latview(var, tindex, latitude, gridid, {optional switch})

        optional switch:
          - filename   if defined, load the variable from file
          - cmin       set color minimum limit
          - cmax       set color maximum limit
          - clev       set the number of color steps
          - fill       use contourf instead of pcolor
          - contour    overlay contours (requires fill=True)
          - d          contour density (default d=4)
          - lonrange   longitude range
          - hrange     h range
          - fts        set font size (default: 12)
          - title      add title to the plot
          - map        if True, draw a map showing the slice location
          - pal        set color map (default: cm.jet)
          - clb        add colorbar (default: True)
          - outfile    if defined, write figure to file

        Plot a constant-latitude slice of variable var. If filename is
        provided, var must be a string and the variable will be loaded
        from the file. gridid can be a grid object or a gridid; in the
        latter case, the grid object corresponding to the provided gridid
        will be loaded.
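A minimal usage sketch (not part of the original docs; the gridid 'MY_GRID', the file his.nc and the latitude 45N are placeholders):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    # zonal section of temperature along 45N at the first time record
    pyroms_toolbox.latview('temp', 0, 45.0, grd, filename='his.nc',
                           fill=True, map=True, title='Temperature at 45N')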
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.latview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.latview'-pysrc.html
deleted file mode 100644
index 385af21..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.latview'-pysrc.html
+++ /dev/null
@@ -1,330 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.latview'; listing follows]
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6   
-  7   
-
8 -def latview(var, tindex, latitude, gridid, filename=None, \ - 9 cmin=None, cmax=None, clev=None, fill=False, \ - 10 contour=False, d=4, lonrange=None, hrange=None,\ - 11 fts=None, title=None, map=False, \ - 12 pal=None, clb=True, outfile=None): -
13 """ - 14 latview(var, tindex, latitude, gridid, {optional switch}) - 15 - 16 optional switch: - 17 - filename if defined, load the variable from file - 18 - cmin set color minimum limit - 19 - cmax set color maximum limit - 20 - clev set the number of color step - 21 - fill use contourf instead of pcolor - 22 - contour overlay contour (request fill=True) - 23 - d contour density (default d=4) - 24 - lonrange longitude range - 25 - hrange h range - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - map if True, draw a map showing islice location - 29 - pal set color map (default: cm.jet) - 30 - clb add colorbar (defaul: True) - 31 - outfile if defined, write figure to file - 32 - 33 plot a constante-latitudinal slice of variable var. If filename - 34 is provided, var must be a string and the variable will be load - 35 from the file. - 36 grid can be a grid object or a gridid. In the later case, the grid - 37 object correponding to the provided gridid will be loaded. - 38 """ - 39 - 40 # get grid - 41 if type(gridid).__name__ == 'ROMS_Grid': - 42 grd = gridid - 43 else: - 44 grd = pyroms.grid.get_ROMS_grid(gridid) - 45 - 46 # get variable - 47 if filename == None: - 48 var = var - 49 else: - 50 data = pyroms.io.Dataset(filename) - 51 - 52 var = data.variables[var] - 53 - 54 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 55 - 56 if tindex is not -1: - 57 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 58 K, N, M, L = var.shape - 59 else: - 60 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 61 N, M, L = var.shape - 62 - 63 # determine where on the C-grid these variable lies - 64 if N == Np and M == Mp and L == Lp: - 65 Cpos='rho' - 66 lon = grd.hgrid.lon_vert - 67 lat = grd.hgrid.lat_vert - 68 mask = grd.hgrid.mask_rho - 69 - 70 if N == Np and M == Mp and L == Lp-1: - 71 Cpos='u' - 72 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 73 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 74 mask = grd.hgrid.mask_u - 75 - 76 if N == Np and M == Mp-1 and L == Lp: - 77 Cpos='v' - 78 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 79 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 80 mask = grd.hgrid.mask_v - 81 - 82 # get constante-lat slice - 83 if tindex == -1: - 84 var = var[:,:,:] - 85 else: - 86 var = var[tindex,:,:,:] - 87 - 88 if fill == True: - 89 latslice, zs, lons, lats, = pyroms.tools.latslice(var, latitude, grd, Cpos) - 90 else: - 91 latslice, zs, lons, lats, = pyroms.tools.latslice(var, latitude, grd, Cpos, vert=True) - 92 - 93 - 94 # plot - 95 if cmin is None: - 96 cmin = latslice.min() - 97 else: - 98 cmin = float(cmin) - 99 -100 if cmax is None: -101 cmax = latslice.max() -102 else: -103 cmax = float(cmax) -104 -105 if clev is None: -106 clev = 100. 
-107 else: -108 clev = float(clev) -109 -110 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -111 -112 if pal is None: -113 pal = cm.jet -114 else: -115 pal = pal -116 -117 if fts is None: -118 fts = 12 -119 else: -120 fts = fts -121 -122 #pal.set_over('w', 1.0) -123 #pal.set_under('w', 1.0) -124 #pal.set_bad('w', 1.0) -125 -126 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -127 -128 # clear figure -129 #plt.clf() -130 -131 if map is True: -132 # set axes for the main plot in order to keep space for the map -133 if fts < 12: -134 ax=None -135 else: -136 ax = plt.axes([0.15, 0.08, 0.8, 0.65]) -137 else: -138 if fts < 12: -139 ax=None -140 else: -141 ax=plt.axes([0.15, 0.1, 0.8, 0.8]) -142 -143 -144 if fill is True: -145 cf = plt.contourf(lons, zs, latslice, vc, cmap = pal, norm = pal_norm, axes=ax) -146 else: -147 cf = plt.pcolor(lons, zs, latslice, cmap = pal, norm = pal_norm, axes=ax) -148 -149 if clb is True: -150 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -151 for t in clb.ax.get_yticklabels(): -152 t.set_fontsize(fts) -153 -154 if contour is True: -155 if fill is not True: -156 raise Warning, 'Please run again with fill=True for overlay contour.' -157 else: -158 plt.contour(lons, zs, latslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) -159 -160 -161 if lonrange is not None: -162 plt.xlim(lonrange) -163 -164 if hrange is not None: -165 plt.ylim(hrange) -166 -167 if title is not None: -168 if map is True: -169 # move the title on the right -170 xmin, xmax = ax.get_xlim() -171 ymin, ymax = ax.get_ylim() -172 xt = xmin - (xmax-xmin)/9. -173 yt = ymax + (ymax-ymin)/7. -174 plt.text(xt, yt, title, fontsize=fts+4) -175 else: -176 plt.title(title, fontsize=fts+4) -177 -178 plt.xlabel('Latitude', fontsize=fts) -179 plt.ylabel('Depth', fontsize=fts) -180 -181 if map is True: -182 # draw a map with constant-i slice location -183 ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) -184 varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) -185 lon_min = lon.min() -186 lon_max = lon.max() -187 lon_0 = (lon_min + lon_max) / 2. -188 lat_min = lat.min() -189 lat_max = lat.max() -190 lat_0 = (lat_min + lat_max) / 2. -191 map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ -192 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -193 resolution='i', area_thresh=10.) -194 x, y = map(lon,lat) -195 if lonrange is None: -196 xs, ys = map(lons[0,:],lats[0,:]) -197 else: -198 c1 = lats[0,:] >= lonrange[0] -199 c2 = lats[0,:] <= lonrange[1] -200 c = c1 & c2 -201 idx = np.where(c == True) -202 xs, ys = map(lons[0,idx[0]],lats[0,idx[0]]) -203 # fill land and draw coastlines -204 map.drawcoastlines() -205 map.fillcontinents(color='grey') -206 #map.drawmapboundary() -207 Basemap.pcolor(map, x, y, varm, axes=ax_map) -208 Basemap.plot(map, xs, ys, 'k-', linewidth=3, axes=ax_map) -209 -210 -211 if outfile is not None: -212 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: -213 print 'Write figure to file', outfile -214 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') -215 else: -216 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -217 -218 -219 return -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.lonview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.lonview'-module.html
deleted file mode 100644
index 9a6cf74..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.lonview'-module.html
+++ /dev/null
@@ -1,265 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module lonview']

    Module lonview'

    Functions
        lonview(var, tindex, longitude, gridid, filename=None, cmin=None,
                cmax=None, clev=None, fill=False, contour=False, d=4,
                latrange=None, hrange=None, fts=None, title=None,
                map=False, pal=None, clb=True, outfile=None)
            lonview(var, tindex, longitude, gridid, {optional switch})

    Function Details
        lonview(var, tindex, longitude, gridid, {optional switch})

        optional switch:
          - filename   if defined, load the variable from file
          - cmin       set color minimum limit
          - cmax       set color maximum limit
          - clev       set the number of color steps
          - fill       use contourf instead of pcolor
          - contour    overlay contours (requires fill=True)
          - d          contour density (default d=4)
          - latrange   latitude range
          - hrange     h range
          - fts        set font size (default: 12)
          - title      add title to the plot
          - map        if True, draw a map showing the slice location
          - pal        set color map (default: cm.jet)
          - clb        add colorbar (default: True)
          - outfile    if defined, write figure to file

        Plot a constant-longitude slice of variable var. If filename is
        provided, var must be a string and the variable will be loaded
        from the file. gridid can be a grid object or a gridid; in the
        latter case, the grid object corresponding to the provided gridid
        will be loaded.
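A minimal usage sketch (not part of the original docs; the gridid 'MY_GRID', the file his.nc, the longitude value and the depth range are placeholders):

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    # meridional section of salinity along 150W at the first time record,
    # limited to the upper 500 m
    pyroms_toolbox.lonview('salt', 0, -150.0, grd, filename='his.nc',
                           fill=True, hrange=(-500, 0),
                           title='Salinity at 150W')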
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.lonview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.lonview'-pysrc.html
deleted file mode 100644
index acc5c53..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.lonview'-pysrc.html
+++ /dev/null
@@ -1,330 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.lonview'; listing follows]
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6   
-  7   
-
8 -def lonview(var, tindex, longitude, gridid, filename=None, \ - 9 cmin=None, cmax=None, clev=None, fill=False, \ - 10 contour=False, d=4, latrange=None, hrange=None,\ - 11 fts=None, title=None, map=False, \ - 12 pal=None, clb=True, outfile=None): -
13 """ - 14 lonview(var, tindex, longitude, gridid, {optional switch}) - 15 - 16 optional switch: - 17 - filename if defined, load the variable from file - 18 - cmin set color minimum limit - 19 - cmax set color maximum limit - 20 - clev set the number of color step - 21 - fill use contourf instead of pcolor - 22 - contour overlay contour (request fill=True) - 23 - d contour density (default d=4) - 24 - latrange latitude range - 25 - hrange h range - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - map if True, draw a map showing islice location - 29 - pal set color map (default: cm.jet) - 30 - clb add colorbar (defaul: True) - 31 - outfile if defined, write figure to file - 32 - 33 plot a constante-longitudinal slice of variable var. If filename - 34 is provided, var must be a string and the variable will be load - 35 from the file. - 36 grid can be a grid object or a gridid. In the later case, the grid - 37 object correponding to the provided gridid will be loaded. - 38 """ - 39 - 40 # get grid - 41 if type(gridid).__name__ == 'ROMS_Grid': - 42 grd = gridid - 43 else: - 44 grd = pyroms.grid.get_ROMS_grid(gridid) - 45 - 46 # get variable - 47 if filename == None: - 48 var = var - 49 else: - 50 data = pyroms.io.Dataset(filename) - 51 - 52 var = data.variables[var] - 53 - 54 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 55 - 56 if tindex is not -1: - 57 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 58 K, N, M, L = var.shape - 59 else: - 60 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 61 N, M, L = var.shape - 62 - 63 # determine where on the C-grid these variable lies - 64 if N == Np and M == Mp and L == Lp: - 65 Cpos='rho' - 66 lon = grd.hgrid.lon_vert - 67 lat = grd.hgrid.lat_vert - 68 mask = grd.hgrid.mask_rho - 69 - 70 if N == Np and M == Mp and L == Lp-1: - 71 Cpos='u' - 72 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 73 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 74 mask = grd.hgrid.mask_u - 75 - 76 if N == Np and M == Mp-1 and L == Lp: - 77 Cpos='v' - 78 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 79 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 80 mask = grd.hgrid.mask_v - 81 - 82 # get constante-lon slice - 83 if tindex == -1: - 84 var = var[:,:,:] - 85 else: - 86 var = var[tindex,:,:,:] - 87 - 88 if fill == True: - 89 lonslice, zs, lons, lats, = pyroms.tools.lonslice(var, longitude, grd, Cpos) - 90 else: - 91 lonslice, zs, lons, lats, = pyroms.tools.lonslice(var, longitude, grd, Cpos, vert=True) - 92 - 93 - 94 # plot - 95 if cmin is None: - 96 cmin = lonslice.min() - 97 else: - 98 cmin = float(cmin) - 99 -100 if cmax is None: -101 cmax = lonslice.max() -102 else: -103 cmax = float(cmax) -104 -105 if clev is None: -106 clev = 100. 
-107 else: -108 clev = float(clev) -109 -110 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -111 -112 if pal is None: -113 pal = cm.jet -114 else: -115 pal = pal -116 -117 if fts is None: -118 fts = 12 -119 else: -120 fts = fts -121 -122 #pal.set_over('w', 1.0) -123 #pal.set_under('w', 1.0) -124 #pal.set_bad('w', 1.0) -125 -126 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -127 -128 # clear figure -129 #plt.clf() -130 -131 if map is True: -132 # set axes for the main plot in order to keep space for the map -133 if fts < 12: -134 ax=None -135 else: -136 ax = plt.axes([0.15, 0.08, 0.8, 0.65]) -137 else: -138 if fts < 12: -139 ax=None -140 else: -141 ax=plt.axes([0.15, 0.1, 0.8, 0.8]) -142 -143 -144 if fill is True: -145 cf = plt.contourf(lats, zs, lonslice, vc, cmap = pal, norm = pal_norm, axes=ax) -146 else: -147 cf = plt.pcolor(lats, zs, lonslice, cmap = pal, norm = pal_norm, axes=ax) -148 -149 if clb is True: -150 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -151 for t in clb.ax.get_yticklabels(): -152 t.set_fontsize(fts) -153 -154 if contour is True: -155 if fill is not True: -156 raise Warning, 'Please run again with fill=True for overlay contour.' -157 else: -158 plt.contour(lats, zs, lonslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) -159 -160 -161 if latrange is not None: -162 plt.xlim(latrange) -163 -164 if hrange is not None: -165 plt.ylim(hrange) -166 -167 if title is not None: -168 if map is True: -169 # move the title on the right -170 xmin, xmax = ax.get_xlim() -171 ymin, ymax = ax.get_ylim() -172 xt = xmin - (xmax-xmin)/9. -173 yt = ymax + (ymax-ymin)/7. -174 plt.text(xt, yt, title, fontsize=fts+4) -175 else: -176 plt.title(title, fontsize=fts+4) -177 -178 plt.xlabel('Latitude', fontsize=fts) -179 plt.ylabel('Depth', fontsize=fts) -180 -181 if map is True: -182 # draw a map with constant-i slice location -183 ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) -184 varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) -185 lon_min = lon.min() -186 lon_max = lon.max() -187 lon_0 = (lon_min + lon_max) / 2. -188 lat_min = lat.min() -189 lat_max = lat.max() -190 lat_0 = (lat_min + lat_max) / 2. -191 map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ -192 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -193 resolution='i', area_thresh=10.) -194 x, y = map(lon,lat) -195 if latrange is None: -196 xs, ys = map(lons[0,:],lats[0,:]) -197 else: -198 c1 = lats[0,:] >= latrange[0] -199 c2 = lats[0,:] <= latrange[1] -200 c = c1 & c2 -201 idx = np.where(c == True) -202 xs, ys = map(lons[0,idx[0]],lats[0,idx[0]]) -203 # fill land and draw coastlines -204 map.drawcoastlines() -205 map.fillcontinents(color='grey') -206 #map.drawmapboundary() -207 Basemap.pcolor(map, x, y, varm, axes=ax_map) -208 Basemap.plot(map, xs, ys, 'k-', linewidth=3, axes=ax_map) -209 -210 -211 if outfile is not None: -212 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: -213 print 'Write figure to file', outfile -214 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') -215 else: -216 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -217 -218 -219 return -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-module.html
deleted file mode 100644
index 0a399d7..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-module.html
+++ /dev/null
@@ -1,142 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module lsq_phase_amplitude']

    Module lsq_phase_amplitude'

    Functions
        lsq_phase_amplitude(omega, ue, un, t)
            Amp, Pha = lsq_phase_amplitude(omega, ue, un, t)
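A minimal synthetic sketch (not part of the original docs; the constituent, time base and velocity series are made up): omega holds the angular frequencies of the constituents to fit, ue/un are the east/north velocity records, and t is the sample times.

    import numpy as np
    import pyroms_toolbox

    # least-squares fit of a single (M2-like) constituent to a velocity record
    omega = np.array([2.0 * np.pi / 12.42])    # angular frequency in rad/hour
    t = np.arange(0.0, 30.0 * 24.0, 1.0)       # hourly samples for 30 days
    ue = 0.3 * np.cos(omega[0] * t - 0.5) + 0.01 * np.random.randn(t.size)
    un = 0.1 * np.sin(omega[0] * t) + 0.01 * np.random.randn(t.size)

    Amp, Pha = pyroms_toolbox.lsq_phase_amplitude(omega, ue, un, t)
    print(Amp, Pha)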
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-pysrc.html
deleted file mode 100644
index 5eae4fa..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.lsq_phase_amplitude'-pysrc.html
+++ /dev/null
@@ -1,154 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.lsq_phase_amplitude']

    import numpy as np
    import matplotlib.pyplot as plt

    def lsq_phase_amplitude(omega, ue, un, t):
        '''
        Amp, Pha = lsq_phase_amplitude(omega,ue,un,t)
        '''
        nc = omega.shape[0]
        m = 1 + 2*nc

        # build matrix
        c = np.ones((m, t.shape[0]))
        for i in range(nc):
            c[2*i+1, :] = np.cos(omega[i]*t)
            c[2*i+2, :] = np.sin(omega[i]*t)

        A = np.zeros((m, m))
        for i in range(m):
            for j in range(m):
                A[i, j] = np.mean(c[i, :] * c[j, :])

        b1 = np.zeros(m)
        b2 = np.zeros(m)
        for i in range(m):
            b1[i] = np.mean(ue * c[i, :])
            b2[i] = np.mean(un * c[i, :])

        # get solution
        x1 = np.linalg.solve(A, b1)
        x2 = np.linalg.solve(A, b2)

        C = x1[1:2*nc:2]; D = x1[2:2*nc+1:2]

        Amp = np.sqrt(C*C + D*D)
        Pha = 180*np.arctan2(C, D)/np.pi

        for i in range(nc):
            if Pha[i] < 0:
                Pha[i] = Pha[i] + 360

        return Amp, Pha
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-module.html
deleted file mode 100644
index 2b530fd..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-module.html
+++ /dev/null
@@ -1,140 +0,0 @@
[deleted epydoc page: Package pyroms_toolbox :: Module nc_create_roms_bdry_file']

    Module nc_create_roms_bdry_file'

    Functions
        nc_create_roms_bdry_file(filename, grd, ocean_time)
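A minimal usage sketch (not part of the original docs; 'MY_GRID', parent_his.nc and my_bdry.nc are placeholders). The ocean_time argument is expected to be a variable-like object carrying long_name and units attributes, here borrowed from an existing ROMS file:

    import netCDF4
    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('MY_GRID')

    src = netCDF4.Dataset('parent_his.nc')
    ocean_time = src.variables['ocean_time']

    # create an empty boundary file with the grid dimensions and
    # vertical-coordinate variables filled in
    pyroms_toolbox.nc_create_roms_bdry_file('my_bdry.nc', grd, ocean_time)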
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-pysrc.html
deleted file mode 100644
index ef304fd..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_bdry_file'-pysrc.html
+++ /dev/null
@@ -1,204 +0,0 @@
[deleted epydoc page: Source Code for Module pyroms_toolbox.nc_create_roms_bdry_file'; listing follows]
- 1  import numpy as np 
- 2  from datetime import datetime 
- 3  try: 
- 4    import netCDF4 as netCDF 
- 5  except: 
- 6    import netCDF3 as netCDF 
- 7   
- 8   
-
9 -def nc_create_roms_bdry_file(filename, grd, ocean_time): -
10 -11 # create file -12 nc = netCDF.Dataset(filename, 'w', format='NETCDF3_CLASSIC') -13 nc.Description = 'ROMS file' -14 nc.Author = 'pyroms_toolbox.nc_create_roms_file' -15 nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") -16 nc.title = 'ROMS file' -17 -18 nc.createDimension('xi_rho', np.size(grd.hgrid.mask_rho,1)) -19 nc.createDimension('xi_u', np.size(grd.hgrid.mask_u,1)) -20 nc.createDimension('xi_v', np.size(grd.hgrid.mask_v,1)) -21 nc.createDimension('xi_psi', np.size(grd.hgrid.mask_psi,1)) -22 nc.createDimension('eta_rho', np.size(grd.hgrid.mask_rho,0)) -23 nc.createDimension('eta_u', np.size(grd.hgrid.mask_u,0)) -24 nc.createDimension('eta_v', np.size(grd.hgrid.mask_v,0)) -25 nc.createDimension('eta_psi', np.size(grd.hgrid.mask_psi,0)) -26 nc.createDimension('s_rho', grd.vgrid.N) -27 nc.createDimension('s_w', grd.vgrid.Np) -28 nc.createDimension('ocean_time', None) -29 -30 # write time and grid information -31 nc.createVariable('theta_s', 'f8', ()) -32 nc.variables['theta_s'].long_name = 'S-coordinate surface control parameter' -33 nc.variables['theta_s'][:] = grd.vgrid.theta_s -34 -35 nc.createVariable('theta_b', 'f8', ()) -36 nc.variables['theta_b'].long_name = 'S-coordinate bottom control parameter' -37 nc.variables['theta_b'][:] = grd.vgrid.theta_b -38 -39 nc.createVariable('Tcline', 'f8', ()) -40 nc.variables['Tcline'].long_name = 'S-cordinate surface/bottom layer width' -41 nc.variables['Tcline'].units = 'meter' -42 nc.variables['Tcline'][:] = grd.vgrid.Tcline -43 -44 nc.createVariable('hc', 'f8', ()) -45 nc.variables['hc'].long_name = 'S-coordinate parameter, critical depth' -46 nc.variables['hc'].units = 'meter' -47 nc.variables['hc'][:] = grd.vgrid.hc -48 -49 nc.createVariable('s_rho', 'f8', ('s_rho')) -50 nc.variables['s_rho'].long_name = 'S-coordinate at RHO-points' -51 nc.variables['s_rho'].valid_min = '-1' -52 nc.variables['s_rho'].valid_max = '0' -53 nc.variables['s_rho'].field = 's_rho,scalar' -54 nc.variables['s_rho'][:] = grd.vgrid.s_rho -55 -56 nc.createVariable('s_w', 'f8', ('s_w')) -57 nc.variables['s_w'].long_name = 'S-coordinate at W-points' -58 nc.variables['s_w'].valid_min = '-1' -59 nc.variables['s_w'].valid_max = '0' -60 nc.variables['s_w'].field = 's_w,scalar' -61 nc.variables['s_w'][:] = grd.vgrid.s_w -62 -63 nc.createVariable('Cs_r', 'f8', ('s_rho')) -64 nc.variables['Cs_r'].long_name = 'S-coordinate stretching curves at RHO-points' -65 nc.variables['Cs_r'].valid_min = '-1' -66 nc.variables['Cs_r'].valid_max = '0' -67 nc.variables['Cs_r'].field = 'Cs_r,scalar' -68 nc.variables['Cs_r'][:] = grd.vgrid.Cs_r -69 -70 nc.createVariable('Cs_w', 'f8', ('s_w')) -71 nc.variables['Cs_w'].long_name = 'S-coordinate stretching curves at W-points' -72 nc.variables['Cs_w'].valid_min = '-1' -73 nc.variables['Cs_w'].valid_max = '0' -74 nc.variables['Cs_w'].field = 'Cs_w,scalar' -75 nc.variables['Cs_w'][:] = grd.vgrid.Cs_w -76 -77 nc.createVariable('h', 'f8', ('eta_rho', 'xi_rho')) -78 nc.variables['h'].long_name = 'bathymetry at RHO-points' -79 nc.variables['h'].units ='meter' -80 nc.variables['h'].coordinates = 'lon_rho lat_rho' -81 nc.variables['h'].field = 'bath, scalar' -82 nc.variables['h'][:] = grd.vgrid.h -83 -84 -85 nc.createVariable('ocean_time', 'f8', ('ocean_time')) -86 nc.variables['ocean_time'].long_name = ocean_time.long_name -87 nc.variables['ocean_time'].units = ocean_time.units -88 try: -89 nc.variables['ocean_time'].field = ocean_time.field -90 except: -91 nc.variables['ocean_time'].field = ' ' -92 -93 nc.close() -
94 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-module.html deleted file mode 100644 index 1744e90..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-module.html +++ /dev/null @@ -1,140 +0,0 @@ - - - - - pyroms_toolbox.nc_create_roms_file' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module nc_create_roms_file' - - - - - - -
-
- -

Module nc_create_roms_file'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
nc_create_roms_file(filename, - grd, - ocean_time) - source code - -
- -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-pysrc.html deleted file mode 100644 index a270f7e..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.nc_create_roms_file'-pysrc.html +++ /dev/null @@ -1,300 +0,0 @@ - - - - - pyroms_toolbox.nc_create_roms_file' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module nc_create_roms_file' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.nc_create_roms_file'

-
-  1  import numpy as np 
-  2  from datetime import datetime 
-  3  try: 
-  4    import netCDF4 as netCDF 
-  5  except: 
-  6    import netCDF3 as netCDF 
-  7   
-  8   
-
9 -def nc_create_roms_file(filename, grd, ocean_time): -
10 - 11 # create file - 12 nc = netCDF.Dataset(filename, 'w', format='NETCDF3_CLASSIC') - 13 nc.Description = 'ROMS file' - 14 nc.Author = 'pyroms_toolbox.nc_create_roms_file' - 15 nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - 16 nc.title = 'ROMS file' - 17 - 18 nc.createDimension('xi_rho', np.size(grd.hgrid.mask_rho,1)) - 19 nc.createDimension('xi_u', np.size(grd.hgrid.mask_u,1)) - 20 nc.createDimension('xi_v', np.size(grd.hgrid.mask_v,1)) - 21 nc.createDimension('xi_psi', np.size(grd.hgrid.mask_psi,1)) - 22 nc.createDimension('eta_rho', np.size(grd.hgrid.mask_rho,0)) - 23 nc.createDimension('eta_u', np.size(grd.hgrid.mask_u,0)) - 24 nc.createDimension('eta_v', np.size(grd.hgrid.mask_v,0)) - 25 nc.createDimension('eta_psi', np.size(grd.hgrid.mask_psi,0)) - 26 nc.createDimension('s_rho', grd.vgrid.N) - 27 nc.createDimension('s_w', grd.vgrid.Np) - 28 nc.createDimension('ocean_time', None) - 29 - 30 # write time and grid information - 31 nc.createVariable('theta_s', 'f8', ()) - 32 nc.variables['theta_s'].long_name = 'S-coordinate surface control parameter' - 33 nc.variables['theta_s'][:] = grd.vgrid.theta_s - 34 - 35 nc.createVariable('theta_b', 'f8', ()) - 36 nc.variables['theta_b'].long_name = 'S-coordinate bottom control parameter' - 37 nc.variables['theta_b'][:] = grd.vgrid.theta_b - 38 - 39 nc.createVariable('Tcline', 'f8', ()) - 40 nc.variables['Tcline'].long_name = 'S-cordinate surface/bottom layer width' - 41 nc.variables['Tcline'].units = 'meter' - 42 nc.variables['Tcline'][:] = grd.vgrid.Tcline - 43 - 44 nc.createVariable('hc', 'f8', ()) - 45 nc.variables['hc'].long_name = 'S-coordinate parameter, critical depth' - 46 nc.variables['hc'].units = 'meter' - 47 nc.variables['hc'][:] = grd.vgrid.hc - 48 - 49 nc.createVariable('s_rho', 'f8', ('s_rho')) - 50 nc.variables['s_rho'].long_name = 'S-coordinate at RHO-points' - 51 nc.variables['s_rho'].valid_min = '-1' - 52 nc.variables['s_rho'].valid_max = '0' - 53 nc.variables['s_rho'].field = 's_rho,scalar' - 54 nc.variables['s_rho'][:] = grd.vgrid.s_rho - 55 - 56 nc.createVariable('s_w', 'f8', ('s_w')) - 57 nc.variables['s_w'].long_name = 'S-coordinate at W-points' - 58 nc.variables['s_w'].valid_min = '-1' - 59 nc.variables['s_w'].valid_max = '0' - 60 nc.variables['s_w'].field = 's_w,scalar' - 61 nc.variables['s_w'][:] = grd.vgrid.s_w - 62 - 63 nc.createVariable('Cs_r', 'f8', ('s_rho')) - 64 nc.variables['Cs_r'].long_name = 'S-coordinate stretching curves at RHO-points' - 65 nc.variables['Cs_r'].valid_min = '-1' - 66 nc.variables['Cs_r'].valid_max = '0' - 67 nc.variables['Cs_r'].field = 'Cs_r,scalar' - 68 nc.variables['Cs_r'][:] = grd.vgrid.Cs_r - 69 - 70 nc.createVariable('Cs_w', 'f8', ('s_w')) - 71 nc.variables['Cs_w'].long_name = 'S-coordinate stretching curves at W-points' - 72 nc.variables['Cs_w'].valid_min = '-1' - 73 nc.variables['Cs_w'].valid_max = '0' - 74 nc.variables['Cs_w'].field = 'Cs_w,scalar' - 75 nc.variables['Cs_w'][:] = grd.vgrid.Cs_w - 76 - 77 nc.createVariable('h', 'f8', ('eta_rho', 'xi_rho')) - 78 nc.variables['h'].long_name = 'bathymetry at RHO-points' - 79 nc.variables['h'].units ='meter' - 80 nc.variables['h'].coordinates = 'lon_rho lat_rho' - 81 nc.variables['h'].field = 'bath, scalar' - 82 nc.variables['h'][:] = grd.vgrid.h - 83 - 84 nc.createVariable('pm', 'f8', ('eta_rho', 'xi_rho')) - 85 nc.variables['pm'].long_name = 'curvilinear coordinate metric in XI' - 86 nc.variables['pm'].units ='meter-1' - 87 nc.variables['pm'].coordinates = 'lon_rho lat_rho' - 88 nc.variables['pm'].field = 'pm, scalar' - 
89 nc.variables['pm'][:] = 1. / grd.hgrid.dx - 90 - 91 nc.createVariable('pn', 'f8', ('eta_rho', 'xi_rho')) - 92 nc.variables['pn'].long_name = 'curvilinear coordinate metric in ETA' - 93 nc.variables['pn'].units ='meter-1' - 94 nc.variables['pn'].coordinates = 'lon_rho lat_rho' - 95 nc.variables['pn'].field = 'pn, scalar' - 96 nc.variables['pn'][:] = 1. / grd.hgrid.dy - 97 - 98 nc.createVariable('lon_rho', 'f8', ('eta_rho', 'xi_rho')) - 99 nc.variables['lon_rho'].long_name = 'longitude of RHO-points' -100 nc.variables['lon_rho'].units = 'degree_east' -101 nc.variables['lon_rho'].field = 'lon_rho, scalar' -102 nc.variables['lon_rho'][:] = grd.hgrid.lon_rho -103 -104 nc.createVariable('lat_rho', 'f8', ('eta_rho', 'xi_rho')) -105 nc.variables['lat_rho'].long_name = 'latitude of RHO-points' -106 nc.variables['lat_rho'].units = 'degree_north' -107 nc.variables['lat_rho'].field = 'lat_rho, scalar' -108 nc.variables['lat_rho'][:] = grd.hgrid.lat_rho -109 -110 nc.createVariable('lon_u', 'f8', ('eta_u', 'xi_u')) -111 nc.variables['lon_u'].long_name = 'longitude of U-points' -112 nc.variables['lon_u'].units = 'degree_east' -113 nc.variables['lon_u'].field = 'lon_u, scalar' -114 nc.variables['lon_u'][:] = grd.hgrid.lon_u -115 -116 nc.createVariable('lat_u', 'f8', ('eta_u', 'xi_u')) -117 nc.variables['lat_u'].long_name = 'latitude of U-points' -118 nc.variables['lat_u'].units = 'degree_north' -119 nc.variables['lat_u'].field = 'lat_u, scalar' -120 nc.variables['lat_u'][:] = grd.hgrid.lat_u -121 -122 nc.createVariable('lon_v', 'f8', ('eta_v', 'xi_v')) -123 nc.variables['lon_v'].long_name = 'longitude of V-points' -124 nc.variables['lon_v'].units = 'degree_east' -125 nc.variables['lon_v'].field = 'lon_v, scalar' -126 nc.variables['lon_v'][:] = grd.hgrid.lon_v -127 -128 nc.createVariable('lat_v', 'f8', ('eta_v', 'xi_v')) -129 nc.variables['lat_v'].long_name = 'latitude of V-points' -130 nc.variables['lat_v'].units = 'degree_north' -131 nc.variables['lat_v'].field = 'lat_v, scalar' -132 nc.variables['lat_v'][:] = grd.hgrid.lat_v -133 -134 nc.createVariable('lon_psi', 'f8', ('eta_psi', 'xi_psi')) -135 nc.variables['lon_psi'].long_name = 'longitude of PSI-points' -136 nc.variables['lon_psi'].units = 'degree_east' -137 nc.variables['lon_psi'].field = 'lon_psi, scalar' -138 nc.variables['lon_psi'][:] = grd.hgrid.lon_psi -139 -140 nc.createVariable('lat_psi', 'f8', ('eta_psi', 'xi_psi')) -141 nc.variables['lat_psi'].long_name = 'latitude of PSI-points' -142 nc.variables['lat_psi'].units = 'degree_north' -143 nc.variables['lat_psi'].field = 'lat_psi, scalar' -144 nc.variables['lat_psi'][:] = grd.hgrid.lat_psi -145 -146 nc.createVariable('angle', 'f8', ('eta_rho', 'xi_rho')) -147 nc.variables['angle'].long_name = 'angle between XI-axis and EAST' -148 nc.variables['angle'].units = 'radians' -149 nc.variables['angle'].coordinates = 'lon_rho lat_rho' -150 nc.variables['angle'].field = 'angle, scalar' -151 nc.variables['angle'][:] = grd.hgrid.angle_rho -152 -153 nc.createVariable('mask_rho', 'f8', ('eta_rho', 'xi_rho')) -154 nc.variables['mask_rho'].long_name = 'mask on RHO-points' -155 nc.variables['mask_rho'].option_0 = 'land' -156 nc.variables['mask_rho'].option_1 = 'water' -157 nc.variables['mask_rho'].coordinates = 'lon_rho lat_rho' -158 nc.variables['mask_rho'][:] = grd.hgrid.mask_rho -159 -160 nc.createVariable('mask_u', 'f8', ('eta_u', 'xi_u')) -161 nc.variables['mask_u'].long_name = 'mask on U-points' -162 nc.variables['mask_u'].option_0 = 'land' -163 nc.variables['mask_u'].option_1 = 'water' -164 
nc.variables['mask_u'].coordinates = 'lon_u lat_u' -165 nc.variables['mask_u'][:] = grd.hgrid.mask_u -166 -167 nc.createVariable('mask_v', 'f8', ('eta_v', 'xi_v')) -168 nc.variables['mask_v'].long_name = 'mask on V-points' -169 nc.variables['mask_v'].option_0 = 'land' -170 nc.variables['mask_v'].option_1 = 'water' -171 nc.variables['mask_v'].coordinates = 'lon_v lat_v' -172 nc.variables['mask_v'][:] = grd.hgrid.mask_v -173 -174 nc.createVariable('mask_psi', 'f8', ('eta_psi', 'xi_psi')) -175 nc.variables['mask_psi'].long_name = 'mask on PSI-points' -176 nc.variables['mask_psi'].option_0 = 'land' -177 nc.variables['mask_psi'].option_1 = 'water' -178 nc.variables['mask_psi'].coordinates = 'lon_psi lat_psi' -179 nc.variables['mask_psi'][:] = grd.hgrid.mask_psi -180 -181 nc.createVariable('ocean_time', 'f8', ('ocean_time')) -182 nc.variables['ocean_time'].long_name = ocean_time.long_name -183 nc.variables['ocean_time'].units = ocean_time.units -184 try: -185 nc.variables['ocean_time'].field = ocean_time.field -186 except: -187 nc.variables['ocean_time'].field = ' ' -188 -189 nc.close() -
190 -
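A rough usage sketch for the two nc_create_roms_* helpers deleted here (the bdry variant above differs mainly in skipping the horizontal coordinate and mask variables). The gridid 'YELLOW' and the stand-in time object are assumptions for illustration; the helpers only read .long_name, .units and (optionally) .field off the ocean_time argument.

    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('YELLOW')     # hypothetical gridid from gridid.txt

    # minimal stand-in carrying the attributes the helpers look for
    class OceanTime(object):
        long_name = 'ocean time'
        units = 'seconds since 1900-01-01 00:00:00'

    pyroms_toolbox.nc_create_roms_file('my_roms_file.nc', grd, OceanTime())
    pyroms_toolbox.nc_create_roms_bdry_file('my_roms_bdry.nc', grd, OceanTime())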
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-module.html deleted file mode 100644 index d3103f0..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-module.html +++ /dev/null @@ -1,181 +0,0 @@ - - - - - pyroms_toolbox.plot_coast_line' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module plot_coast_line' - - - - - - -
-
- -

Module plot_coast_line'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
plot_coast_line(grd, - proj=None)
- plot_coast_line(grd, {proj})
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

plot_coast_line(grd, - proj=None) -

-
source code  -
- -

plot_coast_line(grd, {proj})

-

plot the coastline extracted from the grid object grd. proj=map (optional) is a Basemap object used for the map projection.

-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-pysrc.html deleted file mode 100644 index 275a92f..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.plot_coast_line'-pysrc.html +++ /dev/null @@ -1,147 +0,0 @@ - - - - - pyroms_toolbox.plot_coast_line' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module plot_coast_line' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.plot_coast_line'

-
- 1  import numpy as np 
- 2  import matplotlib.pyplot as plt 
- 3  import matplotlib.collections as collections 
- 4  from pyroms_toolbox import get_coast_line 
- 5   
- 6   
-
7 -def plot_coast_line(grd, proj=None): -
8 ''' - 9 plot_coast_line(grd, {proj}) -10 -11 plot the coastline from the object grid. -12 proj=map (optional) is a Basemap object for -13 projection. -14 ''' -15 -16 -17 a = plt.gca() -18 -19 coast = get_coast_line(grd) -20 c = np.array(coast) -21 -22 if proj is None: -23 col = collections.LineCollection(c) -24 else: -25 cp = np.zeros(c.shape) -26 for i in range(c.shape[0]): -27 cp[i,:,0], cp[i,:,1] = proj(c[i,:,0], c[i,:,1]) -28 -29 col = collections.LineCollection(cp) -30 -31 -32 a.add_collection(col, autolim=True) -33 col.set_color('k') -
34 #a.autoscale_view() -35 -
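A hedged example of the call pattern for plot_coast_line, matching the docstring above; the gridid and the Basemap window are made up for illustration.

    import matplotlib.pyplot as plt
    from mpl_toolkits.basemap import Basemap
    import pyroms
    import pyroms_toolbox

    grd = pyroms.grid.get_ROMS_grid('YELLOW')     # hypothetical gridid

    # plain lon/lat axes
    plt.figure()
    pyroms_toolbox.plot_coast_line(grd)

    # or on a Basemap projection, which is what the proj keyword expects
    m = Basemap(projection='merc', llcrnrlon=117, urcrnrlon=127,
                llcrnrlat=30, urcrnrlat=42, resolution='i')
    pyroms_toolbox.plot_coast_line(grd, proj=m)
    plt.show()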
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-module.html deleted file mode 100644 index 2a8bab2..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-module.html +++ /dev/null @@ -1,141 +0,0 @@ - - - - - pyroms_toolbox.plot_mask' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module plot_mask' - - - - - - -
-
- -

Module plot_mask'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
plot_mask(gridid, - Cpos='rho', - proj=None, - **kwargs) - source code - -
- -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-pysrc.html deleted file mode 100644 index e806c60..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.plot_mask'-pysrc.html +++ /dev/null @@ -1,179 +0,0 @@ - - - - - pyroms_toolbox.plot_mask' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module plot_mask' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.plot_mask'

-
- 1  import numpy as np 
- 2  import matplotlib.pyplot as plt 
- 3  from mpl_toolkits.basemap import Basemap 
- 4  import pyroms 
- 5  import pyroms_toolbox 
- 6   
- 7   
-
8 -def plot_mask(gridid, Cpos='rho', proj=None, **kwargs): -
9 -10 -11 # get grid -12 if type(gridid).__name__ == 'ROMS_Grid': -13 grd = gridid -14 else: -15 grd = pyroms.grid.get_ROMS_grid(gridid) -16 -17 Cpos = str(Cpos) -18 print Cpos -19 -20 # get grid information -21 if Cpos == 'rho': -22 lon = grd.hgrid.lon_vert -23 lat = grd.hgrid.lat_vert -24 mask = grd.hgrid.mask_rho -25 -26 elif Cpos == 'u': -27 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) -28 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) -29 mask = grd.hgrid.mask_u -30 -31 elif Cpos == 'v': -32 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) -33 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) -34 mask = grd.hgrid.mask_v -35 -36 else: -37 raise Warning, 'Cpos must be rho, u or v' -38 -39 # defined color map -40 land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) -41 sea_color = kwargs.pop('sea_color', (0.6, 0.6, 1.0)) -42 -43 cm = plt.matplotlib.colors.ListedColormap([land_color, sea_color], -44 name='land/sea') -45 -46 -47 -48 if proj is None: -49 plt.pcolor(lon, lat, mask, cmap=cm, vmin=0, vmax=1, \ -50 edgecolor='k', **kwargs) -51 pyroms_toolbox.plot_coast_line(grd) -52 else: -53 x, y = proj(lon, lat) -54 Basemap.pcolor(proj, x, y, mask, cmap=cm, vmin=0, vmax=1, \ -55 edgecolor='k', **kwargs) -56 pyroms_toolbox.plot_coast_line(grd, proj=proj) -57 -58 lon_min = lon.min() -59 lon_max = lon.max() -60 lat_min = lat.min() -61 lat_max = lat.max() -62 -63 proj.drawmeridians(np.arange(lon_min,lon_max,(lon_max-lon_min)/5.001), \ -64 labels=[0,0,0,1], fmt='%.1f') -65 proj.drawparallels(np.arange(lat_min,lat_max,(lat_max-lat_min)/5.001), \ -66 labels=[1,0,0,0], fmt='%.1f') -
67 -
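A short, hypothetical call for plot_mask; the gridid and colours are illustrative. land_color and sea_color are popped from **kwargs by the function, and the rest is passed on to pcolor.

    import matplotlib.pyplot as plt
    import pyroms_toolbox

    # gridid string or a ROMS_Grid object; 'YELLOW' is an assumed entry in gridid.txt
    pyroms_toolbox.plot_mask('YELLOW', Cpos='rho',
                             land_color=(0.8, 0.8, 0.6),
                             sea_color=(0.4, 0.6, 1.0))
    plt.title('rho-point land/sea mask')
    plt.show()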
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.quiver'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.quiver'-module.html deleted file mode 100644 index 1183589..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.quiver'-module.html +++ /dev/null @@ -1,1547 +0,0 @@ - - - - - pyroms_toolbox.quiver' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module quiver' - - - - - - -
-
- -

Module quiver'

source code

-
-

Version: - 1.3.0 -

-
- - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
quiver(uvar, - vvar, - tindex, - depth, - gridid)
- optional switch:
- source code - -
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
[Variables: epydoc listing of every numpy name pulled into quiver's namespace by "from numpy import *" (ufuncs, constants such as pi, inf and nan, and the numpy type tables).]
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

quiver(uvar, - vvar, - tindex, - depth, - gridid) -

-
source code  -
- -

optional switch:

-
    -
  • - filename if defined, load the variable from file -
  • -
  • - proj Basemap object returned by sview, zview, ... -
  • -
  • - d arrow density parameter -
  • -
  • - uscale data units per arrow length unit parameter -
  • -
  • - xkey x location of the key -
  • -
  • - ykey y location of the key -
  • -
  • - ukey length of the key -
  • -
  • - outfile if defined, write figure to file -
  • -
-

overlay a 2-D field of arrows for the velocity (uvar, vvar) on top of an existing horizontal 2-D plot. If filename is provided, uvar and vvar must be strings and the variables will be loaded from that file. grid can be a grid object or a gridid; in the latter case, the grid object corresponding to the provided gridid is loaded. For projection, use proj=map, map being the Basemap object returned by sview, zview, ...

-

Note: if quiver is called before any other part of the plot has been created, you must create an axis which covers the region to be plotted. To do this, call axis([Longitude_min, Longitude_max, Latitude_min, Latitude_max]), where Longitude_min, etc., are replaced with the appropriate longitudes and latitudes.
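Pulling those switches together, a hedged sketch of a typical quiver call. The history file name and gridid are assumptions, and the depth value is handed straight to pyroms.tools.zslice, so its sign convention follows that routine.

    import matplotlib.pyplot as plt
    import pyroms_toolbox

    # per the note above: set the lon/lat axis first if nothing has been plotted yet
    plt.axis([117.0, 127.0, 30.0, 42.0])      # illustrative lon/lat window

    pyroms_toolbox.quiver('u', 'v', tindex=0, depth=-50, gridid='YELLOW',
                          filename='ocean_his_0001.nc',
                          d=3, uscale=10, xkey=0.9, ykey=0.1, ukey=1,
                          outfile='velocity_50m.png')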

-
-
-
-
-
- - - - - - -
- - - - - -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.quiver'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.quiver'-pysrc.html deleted file mode 100644 index b5d3e4a..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.quiver'-pysrc.html +++ /dev/null @@ -1,231 +0,0 @@ - - - - - pyroms_toolbox.quiver' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module quiver' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.quiver'

-
-  1  from numpy import * 
-  2  import matplotlib.pyplot as plt 
-  3  from mpl_toolkits.basemap import Basemap 
-  4  import pyroms 
-  5   
-  6   
-
7 -def quiver(uvar, vvar, tindex, depth, gridid, \ - 8 filename=None, proj=None, d=2, uscale=None, \ - 9 xkey=0.9, ykey=0.1, ukey=1, outfile=None): -
10 """ - 11 quiver(uvar, vvar, tindex, depth, gridid) - 12 - 13 optional switch: - 14 - filename if defined, load the variable from file - 15 - proj Basemap object returned by sview, zview, ... - 16 - d arrow density parameter - 17 - uscale data units per arrow length unit parameter - 18 - xkey x location of the key - 19 - ykey y location of the key - 20 - ukey length of the key - 21 - outfile if defined, write figure to file - 22 - 23 - 24 overlay a 2-D field of arrows for velocity (uvar, vvar) above an - 25 existing horizontal 2D plot. If filename is provided, uvar and vvar - 26 must be strings and the variables will be load from the file. - 27 grid can be a grid object or a gridid. In the later case, the grid - 28 object correponding to the provided gridid will be loaded. - 29 For projection, use proj=map, map being the Basemap object returned - 30 by sview, zview, ... - 31 - 32 Note: if quiver is called before any other part of the plot has been - 33 created, you must create an axis which covers the region to be plotted. - 34 to do this, you can call axis([Longitude_min,Longitude_max,Latitude_min,Latitude_max] - 35 where Longitude_min, etc, are replaced with the appropriate longitudes and latitudes. - 36 """ - 37 - 38 - 39 # get grid - 40 if type(gridid).__name__ == 'ROMS_Grid': - 41 grd = gridid - 42 else: - 43 grd = pyroms.grid.get_ROMS_grid(gridid) - 44 lon = grd.hgrid.lon_rho - 45 lat = grd.hgrid.lat_rho - 46 mask = grd.hgrid.mask_rho - 47 - 48 # get u and v - 49 if filename == None: - 50 u = uvar[tindex,:,:,:] - 51 v = vvar[tindex,:,:,:] - 52 else: - 53 data = pyroms.io.Dataset(filename) - 54 u = data.variables[uvar][tindex,:,:,:] - 55 v = data.variables[vvar][tindex,:,:,:] - 56 - 57 # get u and v slice at requested depth - 58 zsliceu, lonu, latu = pyroms.tools.zslice(u, depth, grd, Cpos='u') - 59 zslicev, lonv, latv = pyroms.tools.zslice(v, depth, grd, Cpos='v') - 60 - 61 # average field at rho point position - 62 zsliceu = 0.5 * (zsliceu[:,:-1] + zsliceu[:,1:]) - 63 zslicev = 0.5 * (zslicev[:-1,:] + zslicev[1:,:]) - 64 - 65 # correct dimension with zeros at edges - 66 zsliceu = concatenate((zeros((zsliceu.shape[0],1)), zsliceu, \ - 67 zeros((zsliceu.shape[0],1))),1) - 68 zsliceu = ma.masked_where(mask == 0, zsliceu) - 69 zsliceu = ma.masked_where(zsliceu >= 1000, zsliceu) - 70 zslicev = concatenate((zeros((1,zslicev.shape[1])), zslicev, \ - 71 zeros((1,zslicev.shape[1]))),0) - 72 zslicev = ma.masked_where(mask == 0, zslicev) - 73 zslicev = ma.masked_where(zslicev >= 1000, zslicev) - 74 - 75 # rotate velocity vector according to grid angle - 76 rotzsliceu = zsliceu * cos(grd.hgrid.angle_rho) - \ - 77 zslicev * sin(grd.hgrid.angle_rho) - 78 rotzslicev = zsliceu * sin(grd.hgrid.angle_rho) + \ - 79 zslicev * cos(grd.hgrid.angle_rho) - 80 - 81 # plot - 82 if proj is not None: - 83 x, y = proj(lon,lat) - 84 else: - 85 range = plt.axis() - 86 - 87 if uscale is None: - 88 if proj is not None: - 89 qv = Basemap.quiver(proj, x[::d,::d], y[::d,::d], \ - 90 rotzsliceu[::d,::d], rotzslicev[::d,::d], \ - 91 linewidths=0.01) - 92 else: - 93 qv = plt.quiver(lon[::d,::d], lat[::d,::d], \ - 94 rotzsliceu[::d,::d], rotzslicev[::d,::d], \ - 95 linewidths=0.01) - 96 else: - 97 if proj is not None: - 98 qv = Basemap.quiver(proj, x[::d,::d], y[::d,::d], \ - 99 rotzsliceu[::d,::d], rotzslicev[::d,::d], \ -100 scale=uscale, linewidths=0.01) -101 else: -102 qv = plt.quiver(lon[::d,::d], lat[::d,::d], \ -103 rotzsliceu[::d,::d], rotzslicev[::d,::d], \ -104 scale=uscale, linewidths=0.01) -105 -106 if proj 
is None: -107 plt.axis(range) -108 -109 plt.quiverkey(qv, xkey, ykey, ukey, str(ukey) + ' ms$^{-1}$') -110 -111 if outfile is not None: -112 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ -113 outfile.find('.eps') != -1: -114 print 'Write figure to file', outfile -115 plt.savefig(outfile, dpi=100, facecolor='w', edgecolor='w', \ -116 orientation='portrait') -117 else: -118 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -119 -120 return -
121 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.remapping'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.remapping'-module.html deleted file mode 100644 index 5ec258d..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.remapping'-module.html +++ /dev/null @@ -1,152 +0,0 @@ - - - - - pyroms_toolbox.remapping' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module remapping' - - - - - - -
-
- -

Module remapping'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
remapping(varname, - srcfile, - wts_files, - srcgrd, - dstgrd, - rotate_uv=False, - trange=None, - irange=None, - jrange=None, - dstdir='./', - zlevel=None, - dmax=0, - cdepth=0, - kk=0)
- A remapping function to go from one ROMS grid to another ROMS grid.
- source code - -
- -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.remapping'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.remapping'-pysrc.html deleted file mode 100644 index 967cc74..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.remapping'-pysrc.html +++ /dev/null @@ -1,421 +0,0 @@ - - - - - pyroms_toolbox.remapping' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module remapping' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.remapping'

-
-  1  # encoding: utf-8 
-  2   
-  3  import os 
-  4  import numpy as np 
-  5  import glob 
-  6  try: 
-  7    import netCDF4 as netCDF 
-  8  except: 
-  9    import netCDF3 as netCDF 
- 10   
- 11  import pyroms 
- 12  import pyroms_toolbox 
- 13  import _remapping 
- 14   
- 15  import matplotlib.pyplot as plt 
- 16   
- 17  import datetime 
- 18   
-
19 -def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ - 20 rotate_uv=False, trange=None, irange=None, jrange=None, \ - 21 dstdir='./' ,zlevel=None, dmax=0, cdepth=0, kk=0): -
22 ''' - 23 A remaping function to go from a ROMS grid to another ROMS grid. - 24 ''' - 25 - 26 # get input and output grid - 27 if type(srcgrd).__name__ == 'ROMS_Grid': - 28 srcgrd = srcgrd - 29 else: - 30 srcgrd = pyroms.grid.get_ROMS_grid(srcgrd) - 31 if type(dstgrd).__name__ == 'ROMS_Grid': - 32 dstgrd = dstgrd - 33 else: - 34 dstgrd = pyroms.grid.get_ROMS_grid(dstgrd) - 35 - 36 # build intermediaire zgrid - 37 if zlevel is None: - 38 zlevel = np.array([-4500.,-4000.,-3500.,-3000.,-2500.,-2000.,-1750.,\ - 39 -1500.,-1250.,-1000.,-900.,-800.,-700.,-600.,-500.,\ - 40 -400.,-300.,-250.,-200.,-175.,-150.,-125.,-100.,-90.,\ - 41 -80.,-70.,-60.,-50.,-45.,-40.,-35.,-30.,-25.,-20.,-17.5,\ - 42 -15.,-12.5,-10.,-7.5,-5.,-2.5,0.]) - 43 else: - 44 zlevel = np.sort(-abs(zlevel)) - 45 nzlevel = len(zlevel) - 46 src_zcoord = pyroms.vgrid.z_coordinate(srcgrd.vgrid.h, zlevel, nzlevel) - 47 dst_zcoord = pyroms.vgrid.z_coordinate(dstgrd.vgrid.h, zlevel, nzlevel) - 48 srcgrdz = pyroms.grid.ROMS_Grid(srcgrd.name+'_Z', srcgrd.hgrid, src_zcoord) - 49 dstgrdz = pyroms.grid.ROMS_Grid(dstgrd.name+'_Z', dstgrd.hgrid, dst_zcoord) - 50 - 51 # varname argument - 52 if type(varname).__name__ == 'list': - 53 nvar = len(varname) - 54 elif type(varname).__name__ == 'str': - 55 varname = [varname] - 56 nvar = len(varname) - 57 else: - 58 raise ValueError, 'varname must be a str or a list of str' - 59 # if rotate_uv=True, check that u and v are in varname - 60 if rotate_uv is True: - 61 if varname.__contains__('u') == 0 or varname.__contains__('v') == 0: - 62 raise Warning, 'varname must include u and v in order to rotate \ - 63 the velocity field' - 64 else: - 65 varname.remove('u') - 66 varname.remove('v') - 67 nvar = nvar-2 - 68 - 69 # srcfile argument - 70 if type(srcfile).__name__ == 'list': - 71 nfile = len(srcfile) - 72 elif type(srcfile).__name__ == 'str': - 73 srcfile = sorted(glob.glob(srcfile)) - 74 nfile = len(srcfile) - 75 else: - 76 raise ValueError, 'src_srcfile must be a str or a list of str' - 77 - 78 # get wts_file - 79 if type(wts_files).__name__ == 'str': - 80 wts_files = sorted(glob.glob(wts_files)) - 81 - 82 # loop over the srcfile - 83 for nf in range(nfile): - 84 print 'Working with file', srcfile[nf], '...' - 85 - 86 # get time - 87 ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) - 88 ntime = len(ocean_time[:]) - 89 - 90 # trange argument - 91 if trange is None: - 92 trange = range(ntime) - 93 else: - 94 trange = range(trange[0], trange[1]+1) - 95 - 96 # create destination file - 97 dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' + dstgrd.name + '.nc' - 98 print 'Creating destination file', dstfile - 99 if os.path.exists(dstfile) is True: -100 os.remove(dstfile) -101 pyroms_toolbox.nc_create_roms_file(dstfile, dstgrd, ocean_time) -102 -103 # open destination file -104 nc = netCDF.Dataset(dstfile, 'a', format='NETCDF3_CLASSIC') -105 -106 -107 nctidx = 0 -108 # loop over time -109 for nt in trange: -110 -111 nc.variables['ocean_time'][nctidx] = ocean_time[nt] -112 -113 # loop over variable -114 for nv in range(nvar): -115 print ' ' -116 print 'remapping', varname[nv], 'from', srcgrd.name, \ -117 'to', dstgrd.name -118 print 'time =', ocean_time[nt] -119 -120 # get source data -121 src_var = pyroms.utility.get_nc_var(varname[nv], srcfile[nf]) -122 -123 # determine variable dimension -124 ndim = len(src_var.dimensions)-1 -125 -126 # get spval -127 try: -128 spval = src_var._FillValue -129 except: -130 raise Warning, 'Did not find a _FillValue attribute.' 
-131 -132 # irange -133 if irange is None: -134 iirange = (0,src_var._shape()[-1]) -135 else: -136 iirange = irange -137 -138 # jrange -139 if jrange is None: -140 jjrange = (0,src_var._shape()[-2]) -141 else: -142 jjrange = jrange -143 -144 # determine where on the C-grid these variable lies -145 if src_var.dimensions[2].find('_rho') != -1: -146 Cpos='rho' -147 if src_var.dimensions[2].find('_u') != -1: -148 Cpos='u' -149 if src_var.dimensions[2].find('_v') != -1: -150 Cpos='v' -151 if src_var.dimensions[1].find('_w') != -1: -152 Cpos='w' -153 -154 print 'Arakawa C-grid position is', Cpos -155 -156 # create variable in _destination file -157 if nt == trange[0]: -158 print 'Creating variable', varname[nv] -159 nc.createVariable(varname[nv], 'f8', src_var.dimensions) -160 nc.variables[varname[nv]].long_name = src_var.long_name -161 if varname[nv] != 'salt': -162 nc.variables[varname[nv]].units = src_var.units -163 nc.variables[varname[nv]].field = src_var.field -164 nc.variables[varname[nv]]._FillValue = spval -165 -166 # get the right remap weights file -167 for s in range(len(wts_files)): -168 if wts_files[s].__contains__(Cpos+'_to_'+Cpos+'.nc'): -169 wts_file = wts_files[s] -170 break -171 else: -172 if s == len(wts_files) - 1: -173 raise ValueError, 'Did not find the appropriate remap weights file' -174 -175 if ndim == 3: -176 # vertical interpolation from sigma to standard z level -177 print 'vertical interpolation from sigma to standard z level' -178 src_varz = pyroms.remapping.roms2z( \ -179 src_var[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ -180 srcgrd, srcgrdz, Cpos=Cpos, spval=spval, \ -181 irange=iirange, jrange=jjrange) -182 -183 # flood the grid -184 print 'flood the grid' -185 src_varz = pyroms.remapping.flood(src_varz, srcgrdz, Cpos=Cpos, \ -186 irange=iirange, jrange=jjrange, spval=spval, \ -187 dmax=dmax, cdepth=cdepth, kk=kk) -188 -189 tmp_src_varz = np.zeros((nzlevel, src_var[:].shape[-2], src_var[:].shape[-1])) -190 tmp_src_varz[:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] = src_varz -191 else: -192 src_varz = src_var[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] -193 tmp_src_varz = np.zeros(src_var[nt,:].shape) -194 tmp_src_varz[jjrange[0]:jjrange[1],iirange[0]:iirange[1]] = src_varz -195 -196 print datetime.datetime.now() -197 # horizontal interpolation using scrip weights -198 print 'horizontal interpolation using scrip weights' -199 dst_varz = pyroms.remapping.remap(tmp_src_varz, wts_file, \ -200 spval=spval) -201 -202 print datetime.datetime.now() -203 -204 if ndim == 3: -205 # vertical interpolation from standard z level to sigma -206 print 'vertical interpolation from standard z level to sigma' -207 dst_var = pyroms.remapping.z2roms(dst_varz, dstgrdz, dstgrd, \ -208 Cpos=Cpos, spval=spval, flood=False) -209 else: -210 dst_var = dst_varz -211 -212 # write data in destination file -213 print 'write data in destination file' -214 nc.variables[varname[nv]][nctidx] = dst_var -215 -216 -217 # rotate the velocity field if requested -218 if rotate_uv is True: -219 print ' ' -220 print 'remapping and rotating u and v from', srcgrd.name, \ -221 'to', dstgrd.name -222 -223 # get source data -224 src_u = pyroms.utility.get_nc_var('u', srcfile[nf]) -225 src_v = pyroms.utility.get_nc_var('v', srcfile[nf]) -226 -227 # create variable in destination file -228 print 'Creating variable u' -229 nc.createVariable('u', 'f8', src_u.dimensions) -230 nc.variables['u'].long_name = src_u.long_name -231 nc.variables['u'].units = src_u.units -232 nc.variables['u'].field = 
src_u.field -233 nc.variables['u']._FillValue = spval -234 print 'Creating variable v' -235 nc.createVariable('v', 'f8', src_v.dimensions) -236 nc.variables['v'].long_name = src_v.long_name -237 nc.variables['v'].units = src_v.units -238 nc.variables['v'].field = src_v.field -239 nc.variables['v']._FillValue = spval -240 -241 # get the right remap weights file -242 for s in range(len(wts_files)): -243 if wts_files[s].__contains__('u_to_rho.nc'): -244 wts_file_u = wts_files[s] -245 if wts_files[s].__contains__('v_to_rho.nc'): -246 wts_file_v = wts_files[s] -247 -248 # vertical interpolation from sigma to standard z level -249 print 'vertical interpolation from sigma to standard z level' -250 src_uz = pyroms.remapping.roms2z( \ -251 src_u[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ -252 srcgrd, srcgrdz, Cpos='u', irange=iirange, jrange=jjrange) -253 src_vz = pyroms.remapping.roms2z( \ -254 src_v[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ -255 srcgrd, srcgrdz, Cpos='v', irange=iirange, jrange=jjrange) -256 -257 # flood the grid -258 print 'flood the grid' -259 src_uz = pyroms.remapping.flood(src_uz, srcgrdz, Cpos='u', \ -260 irange=iirange, jrange=jjrange, \ -261 spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) -262 src_vz = pyroms.remapping.flood(src_vz, srcgrdz, Cpos='v', \ -263 irange=iirange, jrange=jjrange, \ -264 spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) -265 -266 # horizontal interpolation using scrip weights -267 print 'horizontal interpolation using scrip weights' -268 dst_uz = pyroms.remapping.remap(src_uz, wts_file_u, \ -269 spval=spval) -270 dst_vz = pyroms.remapping.remap(src_vz, wts_file_v, \ -271 spval=spval) -272 -273 # vertical interpolation from standard z level to sigma -274 print 'vertical interpolation from standard z level to sigma' -275 dst_u = pyroms.remapping.z2roms(dst_uz, dstgrdz, dstgrd, \ -276 Cpos='rho', spval=spval, flood=False) -277 dst_v = pyroms.remapping.z2roms(dst_vz, dstgrdz, dstgrd, \ -278 Cpos='rho', spval=spval, flood=False) -279 -280 # rotate u,v fields -281 for s in range(len(wts_files)): -282 if wts_files[s].__contains__('rho_to_rho.nc'): -283 wts_file = wts_files[s] -284 src_angle = pyroms.remapping.remap(srcgrd.hgrid.angle_rho, wts_file) -285 dst_angle = dstgrd.hgrid.angle_rho -286 angle = dst_angle - src_angle -287 angle = np.tile(angle, (dstgrd.vgrid.N, 1, 1)) -288 -289 U = dst_u + dst_v*1j -290 eitheta = np.exp(-1j*angle) -291 U = U * eitheta -292 -293 dst_u = np.real(U) -294 dst_v = np.imag(U) -295 -296 # move back to u,v points -297 dst_u = 0.5 * (dst_u[:,:,:-1] + dst_u[:,:,1:]) -298 dst_v = 0.5 * (dst_v[:,:-1,:] + dst_v[:,1:,:]) -299 -300 # write data in destination file -301 print 'write data in destination file' -302 nc.variables['u'][nctidx] = dst_u -303 nc.variables['v'][nctidx] = dst_v -304 -305 nctidx = nctidx + 1 -306 -307 # close destination file -308 nc.close() -309 -310 return -
311 -
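A hedged sketch of driving remapping. File patterns, grid names and weight-file names are illustrative assumptions; the code itself searches the weight files for substrings such as 'rho_to_rho.nc', 'u_to_rho.nc' and 'v_to_rho.nc', so the names must carry those markers.

    import pyroms_toolbox

    # remap temperature, salinity and the velocity pair from grid 'SRC' to grid 'DST';
    # both are assumed entries in gridid.txt, and ./remapped/ is assumed to exist
    pyroms_toolbox.remapping(['temp', 'salt', 'u', 'v'],
                             'ocean_his_*.nc',                   # glob of source files
                             'remap_weights_SRC_to_DST_*.nc',    # glob of SCRIP weight files
                             'SRC', 'DST',
                             rotate_uv=True,
                             dstdir='./remapped/',
                             dmax=200, cdepth=0, kk=0)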
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-module.html deleted file mode 100644 index effb1c3..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-module.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - pyroms_toolbox.rfactor' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module rfactor' - - - - - - -
-
- -

Module rfactor'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
rfactor(h, - rmask)
- function r = rfactor(h,rmask)
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

rfactor(h, - rmask) -

-
source code  -
- -
-
-function r = rfactor(h,rmask)
-
-This function computes the bathymetry slope parameter (r-factor) from the bathymetry and land/sea mask arrays at RHO-points.
-
-On Input:
-   h           bathymetry at RHO-points.
-   rmask       Land/Sea masking at RHO-points.
-
-On Output:
-   r           R-factor.
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-pysrc.html deleted file mode 100644 index 332d44d..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.rfactor'-pysrc.html +++ /dev/null @@ -1,170 +0,0 @@ - - - - - pyroms_toolbox.rfactor' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module rfactor' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.rfactor'

-
-
1 -def rfactor(h,rmask): -
2 """ - 3 function r = rfactor(h,rmask) - 4 - 5 This function computes the bathymetry slope from a SCRUM NetCDF file. - 6 - 7 On Input: - 8 h bathymetry at RHO-points. - 9 rmask Land/Sea masking at RHO-points. -10 -11 On Output: -12 r R-factor. -13 """ -14 -15 Lp, Mp = h.shape -16 L=Lp-1 -17 M=Mp-1 -18 -19 # Land/Sea mask on U-points. -20 umask = np.zeros((L,Mp)) -21 for j in range(Mp): -22 for i in range(1,Lp): -23 umask[i-1,j] = rmask[i,j] * rmask[i-1,j] -24 -25 # Land/Sea mask on V-points. -26 vmask = np.zeros((Lp,M)) -27 for j in range(1,Mp): -28 for i in range(Lp): -29 vmask[i,j-1] = rmask[i,j] * rmask[i,j-1] -30 -31 #------------------------------------------------------------------- -32 # Compute R-factor. -33 #------------------------------------------------------------------- -34 -35 hx = np.zeros((L,Mp)) -36 hy = np.zeros((Lp,M)) -37 -38 hx = abs(h[1:,:] - h[:-1,:]) / (h[1:,:] + h[:-1,:]) -39 hy = abs(h[:,1:] - h[:,:-1]) / (h[:,1:] + h[:,:-1]) -40 -41 hx = hx * umask -42 hy = hy * vmask -43 -44 r = np.zeros((L,M)) -45 -46 r = np.maximum(np.maximum(hx[:,:-1],hx[:,1:]), np.maximum(hy[:-1,:],hy[1:,:])) -47 -48 rmin = r.min() -49 rmax = r.max() -50 ravg = r.mean() -51 rmed = np.median(r) -52 -53 print ' ' -54 print 'Minimum r-value = ', rmin -55 print 'Maximum r-value = ', rmax -56 print 'Mean r-value = ', ravg -57 print 'Median r-value = ', rmed -58 -59 return r -
60 -
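A small, hypothetical example of computing the r-factor from a grid; note the generated listing above does not show the numpy import the function needs. The gridid is an assumption, and the 0.2 guideline is the value commonly quoted for ROMS rather than anything stated in this file.

    import pyroms
    from pyroms_toolbox import rfactor         # assumes a package-level export

    grd = pyroms.grid.get_ROMS_grid('YELLOW')  # hypothetical gridid
    r = rfactor(grd.vgrid.h, grd.hgrid.mask_rho)
    print('max r-factor:', r.max())            # keep this below roughly 0.2 before smoothing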
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-module.html deleted file mode 100644 index 803db28..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-module.html +++ /dev/null @@ -1,190 +0,0 @@ - - - - - pyroms_toolbox.rvalue' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module rvalue' - - - - - - -
-
- -

Module rvalue'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
rvalue(h)
- function rv = rvalue(h)
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

rvalue(h) -

-
source code  -
- -
-
-function rv = rvalue(h)
-
-This function computes the ROMS stiffness parameter
-(ripped from John Wilkin's rvalue.m)
-
-On Input:
-   h           bathymetry at RHO-points.
-
-On Output:
-   rv          ROMS stiffness parameter.
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-pysrc.html deleted file mode 100644 index 6a9aff5..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.rvalue'-pysrc.html +++ /dev/null @@ -1,151 +0,0 @@ - - - - - pyroms_toolbox.rvalue' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module rvalue' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.rvalue'

-
-
1 -def rvalue(h): -
2 """ - 3 function rv = rvalue(h) - 4 - 5 This function compute ROMS stiffness parameter - 6 (ripped from John Wilkin rvalue.m) - 7 - 8 On Input: - 9 h bathymetry at RHO-points. -10 -11 On Output: -12 rv ROMS stiffness parameter. -13 """ -14 #check that h is 2D -15 if (len(h.squeeze().shape)!=2): -16 raise ValueError, 'h must be two dimensions' -17 -18 #check whether h contains any NaNs -19 if np.isnan(h).any(): raise Warning, 'the height array contains NaNs' -20 -21 #compute diff(h)/2*mean(h) at each velocity grid point -22 dhdx_u = np.diff(h, axis=1) -23 dhdy_v = np.diff(h, axis=0) -24 th_u = 2 * 0.5*(h[:,1:] + h[:,:-1]) -25 th_v = 2 * 0.5*(h[1:,:] + h[:-1,:]) -26 r_u = abs(dhdx_u / th_u) -27 r_v = abs(dhdy_v / th_v) -28 -29 #for each rho point, find the maximum rvalue at the 4 -30 #surrounding u,v points -31 r_u = np.maximum(r_u[:,1:],r_u[:,:-1]) -32 r_v = np.maximum(r_v[1:,:],r_v[:-1,:]) -33 -34 # pad rows and columns to give a result the same size as the input h -35 r_u = np.c_[r_u[:,0], r_u, r_u[:,-1]] -36 r_v = np.r_[np.tile(r_v[0,:], (1, 1)), r_v, np.tile(r_v[-1,:], (1, 1))] -37 -38 rv = np.maximum(r_u,r_v) -39 -40 return rv -
41 -
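And the matching, equally hypothetical call for the stiffness parameter (like rfactor, the listing does not show its numpy import):

    import pyroms
    from pyroms_toolbox import rvalue          # assumes a package-level export

    grd = pyroms.grid.get_ROMS_grid('YELLOW')  # hypothetical gridid
    rv = rvalue(grd.vgrid.h)                   # stiffness parameter, same shape as h
    print('max stiffness:', rv.max())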
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater-module.html deleted file mode 100644 index 88cdfb5..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.seawater-module.html +++ /dev/null @@ -1,211 +0,0 @@ - - - - - pyroms_toolbox.seawater - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Package seawater - - - - - - -
-
- -

Package seawater

source code

-
-
-Seawater -- Python functions for properties of sea water
-   Bjørn Ådlandsvik <bjorn@imr.no>,
-   Institute of Marine Research,
-   Version 1.1, 13 November 2002
-
-Public functions:
-Density related
-  dens(S,T,P)           Density of sea water              kg/m**3
-  delta(S,T,P)          Specific volume anomaly           m**3/kg
-  sigma(S,T,P)          Density anomaly                   kg/m**3
-  drhodt(S,T,P)         Temperature derivative of density kg/(K*m**3)
-  alpha(S,T,P)          Thermal expansion coefficient     1/K 
-  drhods(S,T,P)         Salinity derivative of density    kg/m**3
-  beta(S,T,P)           Salinity expansion coefficient
-  
-Salinity related
-  salt(R,T,P)           Salinity
-  cond(S,T,P)           Conductivity ratio
-
-Heat related
-  heatcap(S,T,P)        Heat capacity                     J/(kg*K)
-  adtgrad(S,T,P)        Adiabatic lapse rate              K/dbar
-  temppot(S,T,P,Pref)   Potential temperature             °C
-  temppot0(S,T,P)       Potential temperature             °C
-  
-Miscellaneous
-  freezept(S,P)         Freezing point                    °C
-  soundvel(S,T,P)       Sound velocity                    m/s
-  depth(P,lat)          Depth                             m
-
-Arguments:
-  S     = Salinity                 
-  T     = Temperature               °C
-  P     = Pressure                  dbar 
-  R     = Conductivity ratio
-  Pref  = Reference pressure        dbar
-  lat   = Latitude                  deg
-
-References:
-  [Bryden 1973], New polynomials for thermal expansion, adiabatic
-  temperature gradient and potential temperature gradient of sea water
-  Deep-Sea Res. 20, 401-408
-
-  [UNESCO 1981], Tenth report of the joint panel on oceanographic
-  tables and standards, Unesco technical papers in marine science 36.
-
-  [UNESCO 1983], N.P. Fofonoff and R.C. Millard Jr., Algorithms for
-  computation of fundamental properties of seawater, Unesco technical
-  papers in marine science 44.
-
-
- - - - - - - - -
- - - - - -
Submodules[hide private]
-
-
- -
- - - - - - - - - -
- - - - - -
Classes[hide private]
-
-   - - OutOfRangeError -
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater-pysrc.html deleted file mode 100644 index 1914009..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.seawater-pysrc.html +++ /dev/null @@ -1,173 +0,0 @@ - - - - - pyroms_toolbox.seawater - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Package seawater - - - - - - -
-
-

Source Code for Package pyroms_toolbox.seawater

-
- 1  # --- encoding: iso-8859-1 --- 
- 2   
- 3  """ 
- 4   Seawater -- Python functions for properties of sea water 
- 5      Bjørn Ådlandsvik <bjorn@imr.no>, 
- 6      Institute of Marine Research, 
- 7      Version 1.1, 13 November 2002 
- 8   
- 9   Public functions: 
-10   Density related 
-11     dens(S,T,P)           Density of sea water              kg/m**3 
-12     delta(S,T,P)          Specific volume anomaly           m**3/kg 
-13     sigma(S,T,P)          Density anomaly                   kg/m**3 
-14     drhodt(S,T,P)         Temperature derivative of density kg/(K*m**3) 
-15     alpha(S,T,P)          Thermal expansion coefficient     1/K  
-16     drhods(S,T,P)         Salinity derivative of density    kg/m**3 
-17     beta(S,T,P)           Salinity expansion coefficient 
-18      
-19   Salinity related 
-20     salt(R,T,P)           Salinity 
-21     cond(S,T,P)           Conductivity ratio 
-22   
-23   Heat related 
-24     heatcap(S,T,P)        Heat capacity                     J/(kg*K) 
-25     adtgrad(S,T,P)        Adiabatic lapse rate              K/dbar 
-26     temppot(S,T,P,Pref)   Potential temperature             °C 
-27     temppot0(S,T,P)       Potential temperature             °C 
-28      
-29   Miscellaneous 
-30     freezept(S,P)         Freezing point                    °C 
-31     soundvel(S,T,P)       Sound velocity                    m/s 
-32     depth(P,lat)          Depth                             m 
-33   
-34   Arguments: 
-35     S     = Salinity                  
-36     T     = Temperature               °C 
-37     P     = Pressure                  dbar  
-38     R     = Conductivity ratio 
-39     Pref  = Reference pressure        dbar 
-40     lat   = Latitude                  deg 
-41   
-42   References: 
-43     [Bryden 1973], New polynomials for thermal expansion, adiabatic 
-44     temperature gradient and potential temperature gradient of sea water 
-45     Deep-Sea Res. 20, 401-408 
-46   
-47     [UNESCO 1981], Tenth report of the joint panel on oceanographic 
-48     tables and standards, Unesco technical papers in marine science 36. 
-49   
-50     [UNESCO 1983], N.P. Fofonoff and R.C. Millard Jr., Algorithms for 
-51     computation of fundamental properties of seawater, Unesco technical 
-52     papers in marine science 44. 
-53   
-54  """ 
-55   
-56  # --- Exceptions --- 
-
57 -class OutOfRangeError(Exception): pass -
58 -59 from density import dens, svan, sigma, drhodt, alpha, drhods, beta -60 from salinity import salt, cond -61 from heat import heatcap, adtgrad, temppot, temppot0 -62 from misc import freezept, soundvel, depth -63 -
-
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.OutOfRangeError-class.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.OutOfRangeError-class.html
deleted file mode 100644
index 81944e6..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.OutOfRangeError-class.html
+++ /dev/null
@@ -1,186 +0,0 @@

Class OutOfRangeError

source code

-
-              object --+        
-                       |        
-exceptions.BaseException --+    
-                           |    
-        exceptions.Exception --+
-                               |
-                              OutOfRangeError
-
- -
- - - - - - - - - -
- - - - - -
Instance Methods
-
-

Inherited from exceptions.Exception: - __init__, - __new__ -

-

Inherited from exceptions.BaseException: - __delattr__, - __getattribute__, - __getitem__, - __getslice__, - __reduce__, - __repr__, - __setattr__, - __setstate__, - __str__ -

-

Inherited from object: - __hash__, - __reduce_ex__ -

-
- - - - - - - - - -
- - - - - -
Properties
-
-

Inherited from exceptions.BaseException: - args, - message -

-

Inherited from object: - __class__ -

-
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-module.html
deleted file mode 100644
index 1d12980..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-module.html
+++ /dev/null
@@ -1,567 +0,0 @@

Module density

source code

-

Seawater density module

-

dens(S, T[, P]) -- Density svan(S, T[, P]) -- Specific volume - anomaly sigma(S, T[, P]) -- Density anomaly drhodt(S, T[, P]) -- - Temperature derivative of density alpha(S, T[, P]) -- Thermal expansion - coefficient drhods(S, T[, P]) -- Salinity derivative of density beta(S, - T[, P]) -- Saline expansion coefficient

-

Bjørn Ådlandsvik <bjorn@imr.no>, 07 November 2004

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
_dens0(S, - T)
- Density of seawater at zero pressure
- source code - -
- -
-   - - - - - - -
_seck(S, - T, - P=0)
- Secant bulk modulus
- source code - -
- -
-   - - - - - - -
dens(S, - T, - P=0)
- Compute density of seawater from salinity, temperature, and pressure
- source code - -
- -
-   - - - - - - -
svan(S, - T, - P=0)
- Compute specific volume anomaly
- source code - -
- -
-   - - - - - - -
sigma(S, - T, - P=0)
- Compute density anomaly, sigma-T
- source code - -
- -
-   - - - - - - -
drhodt(S, - T, - P=0)
- Compute temperature derivative of density
- source code - -
- -
-   - - - - - - -
alpha(S, - T, - P=0)
- Compute thermal expansion coefficient
- source code - -
- -
-   - - - - - - -
drhods(S, - T, - P=0)
- Compute salinity derivative of density
- source code - -
- -
-   - - - - - - -
beta(S, - T, - P=0)
- Compute saline expansion coefficient
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

dens(S, - T, - P=0) -

-
source code  -
- -
-Compute density of seawater from salinity, temperature, and pressure
-
-Usage: dens(S, T, [P])
-
-Input:               
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar = 10**4 Pa]
-P is optional, with default value zero
-
-Output:
-    Density,          [kg/m**3]
-
-Algorithm: UNESCO 1983
-
-
-
-
-
-
- -
- -
- - -
-

svan(S, - T, - P=0) -

-
source code  -
- -
-Compute specific volume anomaly
-
-Usage: svan(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Specific volume anomaly  [m**3/kg]
-
-
-
-
-
-
- -
- -
- - -
-

sigma(S, - T, - P=0) -

-
source code  -
- -
-Compute density anomaly, sigma-T
-
-Usage: sigma(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-   Density anomaly,  [kg/m**3]
-
-
-
-
-
-
- -
- -
- - -
-

drhodt(S, - T, - P=0) -

-
source code  -
- -
-Compute temperature derivative of density
-
-Usage: drhodt(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Temperature derivative of density  [kg /(K m**3)]
-
-
-
-
-
-
- -
- -
- - -
-

alpha(S, - T, - P=0) -

-
source code  -
- -
-Compute thermal expansion coefficient
-
-Usage: alpha(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Thermal expansion coefficient,  [1/K]
-
-
-
-
-
-
- -
- -
- - -
-

drhods(S, - T, - P=0) -

-
source code  -
- -
-Compute salinity derivative of density
-
-Usage: drhodt(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Salinity derivative of density [kg/m**3]
-
-
-
-
-
-
- -
- -
- - -
-

beta(S, - T, - P=0) -

-
source code  -
- -
-Compute saline expansion coefficient
-
-Usage: alpha(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Saline expansion coefficient
-
-
-
-
-
-
-
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-pysrc.html
deleted file mode 100644
index 49e0867..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.density-pysrc.html
+++ /dev/null
@@ -1,551 +0,0 @@

Source Code for Module pyroms_toolbox.seawater.density

-
-  1  # --- encoding: iso-8859-1 --- 
-  2   
-  3  """Seawater density module 
-  4   
-  5  dens(S, T[, P])   -- Density 
-  6  svan(S, T[, P])   -- Specific volume anomaly 
-  7  sigma(S, T[, P])  -- Density anomaly 
-  8  drhodt(S, T[, P]) -- Temperature derivative of density 
-  9  alpha(S, T[, P])  -- Thermal expansion coefficient 
- 10  drhods(S, T[, P]) -- Salinity derivative of density 
- 11  beta(S, T[, P])   -- Saline expansion coefficient 
- 12   
- 13  Bjørn Ådlandsvik <bjorn@imr.no>, 07 November 2004 
- 14   
- 15  """ 
- 16   
- 17  # ----------------------------------------------- 
- 18   
-
19 -def _dens0(S,T): -
20 """Density of seawater at zero pressure""" - 21 - 22 # --- Define constants --- - 23 a0 = 999.842594 - 24 a1 = 6.793952e-2 - 25 a2 = -9.095290e-3 - 26 a3 = 1.001685e-4 - 27 a4 = -1.120083e-6 - 28 a5 = 6.536332e-9 - 29 - 30 b0 = 8.24493e-1 - 31 b1 = -4.0899e-3 - 32 b2 = 7.6438e-5 - 33 b3 = -8.2467e-7 - 34 b4 = 5.3875e-9 - 35 - 36 c0 = -5.72466e-3 - 37 c1 = 1.0227e-4 - 38 c2 = -1.6546e-6 - 39 - 40 d0 = 4.8314e-4 - 41 - 42 # --- Computations --- - 43 # Density of pure water - 44 SMOW = a0 + (a1 + (a2 + (a3 + (a4 + a5*T)*T)*T)*T)*T - 45 - 46 # More temperature polynomials - 47 RB = b0 + (b1 + (b2 + (b3 + b4*T)*T)*T)*T - 48 RC = c0 + (c1 + c2*T)*T - 49 - 50 return SMOW + RB*S + RC*(S**1.5) + d0*S*S -
51 - 52 # ----------------------------------------------------------------- - 53 -
54 -def _seck(S, T, P=0): -
55 """Secant bulk modulus""" - 56 - 57 # --- Pure water terms --- - 58 - 59 h0 = 3.239908 - 60 h1 = 1.43713E-3 - 61 h2 = 1.16092E-4 - 62 h3 = -5.77905E-7 - 63 AW = h0 + (h1 + (h2 + h3*T)*T)*T - 64 - 65 k0 = 8.50935E-5 - 66 k1 = -6.12293E-6 - 67 k2 = 5.2787E-8 - 68 BW = k0 + (k1 + k2*T)*T - 69 - 70 e0 = 19652.21 - 71 e1 = 148.4206 - 72 e2 = -2.327105 - 73 e3 = 1.360477E-2 - 74 e4 = -5.155288E-5 - 75 KW = e0 + (e1 + (e2 + (e3 + e4*T)*T)*T)*T - 76 - 77 # --- seawater, P = 0 --- - 78 - 79 SR = S**0.5 - 80 - 81 i0 = 2.2838E-3 - 82 i1 = -1.0981E-5 - 83 i2 = -1.6078E-6 - 84 j0 = 1.91075E-4 - 85 A = AW + (i0 + (i1 + i2*T)*T + j0*SR)*S - 86 - 87 f0 = 54.6746 - 88 f1 = -0.603459 - 89 f2 = 1.09987E-2 - 90 f3 = -6.1670E-5 - 91 g0 = 7.944E-2 - 92 g1 = 1.6483E-2 - 93 g2 = -5.3009E-4 - 94 K0 = KW + (f0 + (f1 + (f2 + f3*T)*T)*T \ - 95 + (g0 + (g1 + g2*T)*T)*SR)*S - 96 - 97 # --- General expression --- - 98 - 99 m0 = -9.9348E-7 -100 m1 = 2.0816E-8 -101 m2 = 9.1697E-10 -102 B = BW + (m0 + (m1 + m2*T)*T)*S -103 -104 K = K0 + (A + B*P)*P -105 -106 return K -
107 -108 # ---------------------------------------------- -109 -
110 -def dens(S, T, P=0): -
111 """Compute density of seawater from salinity, temperature, and pressure -112 -113 Usage: dens(S, T, [P]) -114 -115 Input: -116 S = Salinity, [PSS-78] -117 T = Temperature, [°C] -118 P = Pressure, [dbar = 10**4 Pa] -119 P is optional, with default value zero -120 -121 Output: -122 Density, [kg/m**3] -123 -124 Algorithm: UNESCO 1983 -125 -126 """ -127 -128 P = 0.1*P # Convert to bar -129 return _dens0(S,T)/(1 - P/_seck(S,T,P)) -
130 -131 # ------------------------------------------- -132 -
133 -def svan(S,T,P=0): -
134 """Compute specific volume anomaly -135 -136 Usage: svan(S, T, [P]) -137 -138 Input: -139 S = Salinity, [PSS-78] -140 T = Temperature, [°C] -141 P = Pressure, [dbar] -142 P is optional, with a default value = zero -143 -144 Output: -145 Specific volume anomaly [m**3/kg] -146 -147 """ -148 return 1.0/dens(S,T,P) - 1.0/dens(35,0,P) -
149 -150 # ----------------------------------------------- -151 -
152 -def sigma(S,T,P=0): -
153 """Compute density anomaly, sigma-T -154 -155 Usage: sigma(S, T, [P]) -156 -157 Input: -158 S = Salinity, [PSS-78] -159 T = Temperature, [°C] -160 P = Pressure, [dbar] -161 P is optional, with a default value = zero -162 -163 Output: -164 Density anomaly, [kg/m**3] -165 -166 """ -167 return dens(S,T,P) - 1000.0 -
168 -169 # ---------------------------------------------- -170 -
171 -def drhodt(S,T,P=0): -
172 """Compute temperature derivative of density -173 -174 Usage: drhodt(S, T, [P]) -175 -176 Input: -177 S = Salinity, [PSS-78] -178 T = Temperature, [°C] -179 P = Pressure, [dbar] -180 P is optional, with a default value = zero -181 -182 Output: -183 Temperature derivative of density [kg /(K m**3)] -184 -185 """ -186 -187 a1 = 6.793952e-2 -188 a2 = -1.819058e-2 -189 a3 = 3.005055e-4 -190 a4 = -4.480332e-6 -191 a5 = 3.268166e-8 -192 -193 b1 = -4.0899e-3 -194 b2 = 1.52876e-4 -195 b3 = -2.47401e-6 -196 b4 = 2.155e-8 -197 -198 c1 = 1.0227e-4 -199 c2 = -3.3092e-6 -200 -201 e1 = 148.4206 -202 e2 = -4.65421 -203 e3 = 4.081431e-2 -204 e4 = -2.0621152e-4 -205 -206 f1 = -0.603459 -207 f2 = 2.19974e-2 -208 f3 = -1.8501e-4 -209 -210 g1 = 1.6483e-2 -211 g2 = -1.06018e-3 -212 -213 h1 = 1.43713e-3 -214 h2 = 2.32184e-4 -215 h3 = -1.733715e-6 -216 -217 i1 = -1.0981e-5 -218 i2 = -3.2156e-6 -219 -220 k1 = -6.12293e-6 -221 k2 = 1.05574e-7 -222 -223 m1 = 2.0816e-8 -224 m2 = 1.83394e-9 -225 -226 P = P/10.0 -227 -228 DSMOV = a1 + (a2 + (a3 + (a4 + a5*T)*T)*T)*T -229 DRHO0 = DSMOV + (b1 + (b2 + (b3 + b4*T)*T)*T)*S + (c1 + c2*T)*S**1.5 -230 -231 DAW = h1 + (h2 + h3*T) -232 DA = DAW + (i1 + i2*T)*S -233 -234 -235 DBW = k1 + k2*T -236 DB = DBW + (m1 + m2*T)*S -237 -238 DKW = e1 + (e2 + (e3 + e4*T)*T)*T -239 DK0 = DKW + (f1 + (f2 + f3*T)*T)*S + (g1 + g2*T)*S**1.5 -240 DK = DK0 + (DA + DB*P)*P -241 -242 K = _seck(S,T,P) -243 RHO0 = _dens0(S,T) -244 denom = 1. - P/K -245 return (DRHO0 * denom - RHO0 * P * DK / (K*K)) / (denom*denom) -
246 -247 # ----------------------------------------------- -248 -
249 -def alpha(S,T,P=0): -
250 """Compute thermal expansion coefficient -251 -252 Usage: alpha(S, T, [P]) -253 -254 Input: -255 S = Salinity, [PSS-78] -256 T = Temperature, [°C] -257 P = Pressure, [dbar] -258 P is optional, with a default value = zero -259 -260 Output: -261 Thermal expansion coefficient, [1/K] -262 -263 """ -264 -265 ALPHA = - drhodt(S,T,P) / dens(S,T,P) -266 return ALPHA -
267 -268 # ------------------------------------------------ -269 -
270 -def drhods(S,T,P=0): -
271 """Compute salinity derivative of density -272 -273 Usage: drhodt(S, T, [P]) -274 -275 Input: -276 S = Salinity, [PSS-78] -277 T = Temperature, [°C] -278 P = Pressure, [dbar] -279 P is optional, with a default value = zero -280 -281 Output: -282 Salinity derivative of density [kg/m**3] -283 -284 """ -285 -286 b0 = 8.24493e-1 -287 b1 = -4.0899e-3 -288 b2 = 7.6438e-5 -289 b3 = -8.2467e-7 -290 b4 = 5.3875e-9 -291 -292 c0 = -5.72466e-3 -293 c1 = 1.0227e-4 -294 c2 = -1.6546e-6 -295 -296 d0 = 9.6628e-4 -297 -298 f0 = 54.6746 -299 f1 = -0.603459 -300 f2 = 1.09987e-2 -301 f3 = -6.1670e-5 -302 -303 g0 = 7.944e-2 -304 g1 = 1.6483e-2 -305 g2 = -5.3009e-4 -306 -307 i0 = 2.2838e-3 -308 i1 = -1.0981e-5 -309 i2 = -1.6078e-6 -310 -311 j0 = 2.866125e-4 -312 -313 m0 = -9.9348e-7 -314 m1 = 2.0816e-8 -315 m2 = 9.1697e-10 -316 -317 P = 0.1*P # Convert to bar -318 -319 DRHO0 = b0 + T*(b1 + T*(b2 + T*(b3 + T*b4))) + \ -320 1.5*S**0.5*(c0 + T*(c1 + T*c2)) + S*d0 -321 DK0 = f0 + T*(f1 + T*(f2 + T*f3)) + \ -322 1.5*S**0.5*(g0 + T*(g1 + T*g2)) -323 DA = i0 + T*(i1 + T*i2) + j0*S**0.5 -324 DB = m0 + T*(m1 + T*m2) -325 DK = DK0 + P*(DA + P*DB) -326 RHO0 = _dens0(S,T) -327 K = _seck(S,T,P) -328 denom = 1. - P/K -329 DRHO = (DRHO0 * denom - RHO0 * P * DK / (K*K)) / (denom*denom) -330 return DRHO -
331 -332 # ------------------------------------------------------ -333 -
334 -def beta(S,T,P=0): -
335 """Compute saline expansion coefficient -336 -337 Usage: alpha(S, T, [P]) -338 -339 Input: -340 S = Salinity, [PSS-78] -341 T = Temperature, [°C] -342 P = Pressure, [dbar] -343 P is optional, with a default value = zero -344 -345 Output: -346 Saline expansion coefficient -347 -348 """ -349 -350 BETA = drhods(S,T,P) / dens(S,T,P) -351 return BETA -
352 -353 -354 ### SALINITY FUNCTIONS ################################# -355 -356 ## def _sal(XR,XT): -357 -358 ## a0 = 0.0080 -359 ## a1 = -0.1692 -360 ## a2 = 25.3851 -361 ## a3 = 14.0941 -362 ## a4 = -7.0261 -363 ## a5 = 2.7081 -364 -365 ## b0 = 0.0005 -366 ## b1 = -0.0056 -367 ## b2 = -0.0066 -368 ## b3 = -0.0375 -369 ## b4 = 0.0636 -370 ## b5 = -0.0144 -371 -372 ## k = 0.0162 -373 -374 ## DS = (XT / (1+k*XT) ) * \ -375 ## (b0 + (b1 + (b2 + (b3 + (b4 + b5*XR)*XR)*XR)*XR)*XR) -376 -377 ## return a0 + (a1 + (a2 + (a3 + (a4 + a5*XR)*XR)*XR)*XR)*XR + DS -378 -379 ## # --------------------------------------------------- -380 -381 ## def _dsal(XR,XT): -382 -383 ## a1 = -0.1692 -384 ## a2 = 25.3851 -385 ## a3 = 14.0941 -386 ## a4 = -7.0261 -387 ## a5 = 2.7081 -388 -389 ## b1 = -0.0056 -390 ## b2 = -0.0066 -391 ## b3 = -0.0375 -392 ## b4 = 0.0636 -393 ## b5 = -0.0144 -394 -395 ## k = 0.0162 -396 -397 ## dDS = (XT / (1+k*XT) ) * \ -398 ## (b1 + (b2*2 + (b3*3 + (b4*4 + b5*5*XR)*XR)*XR)*XR) -399 -400 ## return a1 + (a2*2 + (a3*3 + (a4*4 + a5*5*XR)*XR)*XR)*XR + dDS -401 -402 ## # --------------------------------------------- -403 -404 ## def _rt(T): -405 -406 ## c0 = 0.6766097 -407 ## c1 = 2.00564e-2 -408 ## c2 = 1.104259e-4 -409 ## c3 = -6.9698e-7 -410 ## c4 = 1.0031e-9 -411 -412 ## return c0 + (c1 + (c2 + (c3 + c4*T)*T)*T)*T -413 -414 ## # --------------------------------------------------- -415 -416 ## def _c(P): -417 -418 ## e1 = 2.070e-5 -419 ## e2 = -6.370e-10 -420 ## e3 = 3.989e-15 -421 -422 ## return (e1 + (e2 + e3*P)*P)*P -423 -424 ## # --------------------------------------------------- -425 -426 ## def _b(T): -427 -428 ## d1 = 3.426e-2 -429 ## d2 = 4.464e-4 -430 -431 ## return 1.0 + (d1 + d2*T)*T -432 -433 ## # --------------------------------------------------- -434 -435 ## def _a(T): -436 -437 ## d3 = 4.215e-1 -438 ## d4 = -3.107e-3 -439 ## return d3 + d4*T -440 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
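In the listing above, alpha() and beta() are defined directly from the derivative and density routines, so a quick consistency check can be written as follows (a sketch, assuming the module imports as pyroms_toolbox.seawater.density):

    from pyroms_toolbox.seawater import density

    S, T, P = 35.0, 10.0, 0.0
    # by construction: alpha = -drhodt/dens and beta = drhods/dens
    assert abs(density.alpha(S, T, P) + density.drhodt(S, T, P) / density.dens(S, T, P)) < 1e-12
    assert abs(density.beta(S, T, P) - density.drhods(S, T, P) / density.dens(S, T, P)) < 1e-12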
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-module.html
deleted file mode 100644
index 3e7cb79..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-module.html
+++ /dev/null
@@ -1,378 +0,0 @@

Module heat

source code

-

Seawater heat module

-

heatcap(S, T[, P]) -- Heat capacity adtgrad(S, T[, P]) -- - Adiabatic temperature gradiente temppot(S, T, P[, Pref]) -- Potential - temperature temppot0(S, T, P) -- Potential temperature, relative - to surface

-

Bjørn Ådlandsvik, <bjorn@imr.no>, 07 November 2004

- - - - - - - - - - - - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
heatcap(S, - T, - P=0)
- Compute heat capacity
- source code - -
- -
-   - - - - - - -
adtgrad(S, - T, - P=0)
- Compute adiabatic temperature gradient
- source code - -
- -
-   - - - - - - -
temppot(S, - T, - P, - Pref=0)
- Compute potential temperature
- source code - -
- -
-   - - - - - - -
temppot0(S, - T, - P)
- Compute potential temperature relative to surface
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

heatcap(S, - T, - P=0) -

-
source code  -
- -
-Compute heat capacity
-
-Usage: heatcap(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Heat capacity  [J/(kg*K)]
-
-Algorithm: UNESCO 1983
-
-
-
-
-
-
- -
- -
- - -
-

adtgrad(S, - T, - P=0) -

-
source code  -
- -
-Compute adiabatic temperature gradient
-
-Usage: adtgrad(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Adiabatic temperature gradient,  [K/dbar]
-
-Algorithm: UNESCO 1983
-
-
-
-
-
-
- -
- -
- - -
-

temppot(S, - T, - P, - Pref=0) -

-
source code  -
- -
-Compute potential temperature
-
-Usage: temppot(S, T, P, [Pref])
-
-Input:
-    S = Salinity,                [PSS-78]
-    T = Temperature,             [°C]
-    P = Pressure,                [dbar]
-    Pref = Reference pressure,   [dbar]
-Pref is optional, with a default value = zero
-
-Output:
-    Potential temperature,  [°C]
-
-Algorithm: UNESCO 1983
-
-
-
-
-
-
- -
- -
- - -
-

temppot0(S, - T, - P) -

-
source code  -
- -
-Compute potential temperature relative to surface
-
-Usage: temppot0(S, T, P)
-
-Input:
-    S = Salinity,                [PSS-78]
-    T = Temperature,             [°C]
-    P = Pressure,                [dbar]
-
-Output:
-    Potential temperature,       [°C]
-
-Algorithm: Bryden 1973
-
-Note: Due to different algorithms,
-    temppot0(S, T, P) != tempot(S, T, P, Pref=0)
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-pysrc.html
deleted file mode 100644
index 44cd8fe..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.heat-pysrc.html
+++ /dev/null
@@ -1,357 +0,0 @@

Source Code for Module pyroms_toolbox.seawater.heat

-
-  1  # --- encoding: iso-8859-1 --- 
-  2   
-  3  """Seawater heat module 
-  4   
-  5  heatcap(S, T[, P])       -- Heat capacity 
-  6  adtgrad(S, T[, P])       -- Adiabatic temperature gradiente 
-  7  temppot(S, T, P[, Pref]) -- Potential temperature 
-  8  temppot0(S, T, P)        -- Potential temperature, relative to surface 
-  9   
- 10  Bjørn Ådlandsvik, <bjorn@imr.no>, 07 November 2004 
- 11   
- 12  """ 
- 13   
- 14  # ------------------------------------------------- 
- 15   
-
16 -def heatcap(S, T, P=0): -
17 """Compute heat capacity - 18 - 19 Usage: heatcap(S, T, [P]) - 20 - 21 Input: - 22 S = Salinity, [PSS-78] - 23 T = Temperature, [°C] - 24 P = Pressure, [dbar] - 25 P is optional, with a default value = zero - 26 - 27 Output: - 28 Heat capacity [J/(kg*K)] - 29 - 30 Algorithm: UNESCO 1983 - 31 - 32 """ - 33 - 34 P = 0.1*P # Conversion to bar - 35 - 36 # - Temperatur dependence - 37 c0 = 4217.4 - 38 c1 = -3.720283 - 39 c2 = 0.1412855 - 40 c3 = -2.654387e-3 - 41 c4 = 2.093236e-5 - 42 - 43 a0 = -7.64357 - 44 a1 = 0.1072763 - 45 a2 = -1.38385e-3 - 46 - 47 b0 = 0.1770383 - 48 b1 = -4.07718e-3 - 49 b2 = 5.148e-5 - 50 - 51 CP0 = c0 + c1*T + c2*T**2 + c3*T**3 + c4*T**4 \ - 52 + (a0 + a1*T + a2*T**2)*S \ - 53 + (b0 + b1*T + b2*T**2)*S**1.5 - 54 - 55 # - Pressure dependence - 56 a0 = -4.9592e-1 - 57 a1 = 1.45747e-2 - 58 a2 = -3.13885e-4 - 59 a3 = 2.0357e-6 - 60 a4 = 1.7168e-8 - 61 - 62 b0 = 2.4931e-4 - 63 b1 = -1.08645e-5 - 64 b2 = 2.87533e-7 - 65 b3 = -4.0027e-9 - 66 b4 = 2.2956e-11 - 67 - 68 c0 = -5.422e-8 - 69 c1 = 2.6380e-9 - 70 c2 = -6.5637e-11 - 71 c3 = 6.136e-13 - 72 - 73 CP1 = (a0 + a1*T + a2*T**2 + a3*T**3 + a4*T**4)*P \ - 74 + (b0 + b1*T + b2*T**2 + b3*T**3 + b4*T**4)*P**2 \ - 75 + (c0 + c1*T + c2*T**2 + c3*T**3)*P**3 - 76 - 77 # - Salinity dependence - 78 d0 = 4.9247e-3 - 79 d1 = -1.28315e-4 - 80 d2 = 9.802e-7 - 81 d3 = 2.5941e-8 - 82 d4 = -2.9179e-10 - 83 - 84 e0 = -1.2331e-4 - 85 e1 = -1.517e-6 - 86 e2 = 3.122e-8 - 87 - 88 f0 = -2.9558e-6 - 89 f1 = 1.17054e-7 - 90 f2 = -2.3905e-9 - 91 f3 = 1.8448e-11 - 92 - 93 g0 = 9.971e-8 - 94 - 95 h0 = 5.540e-10 - 96 h1 = -1.7682e-11 - 97 h2 = 3.513e-13 - 98 - 99 j1 = -1.4300e-12 -100 S3_2 = S**1.5 -101 -102 CP2 = ((d0 + d1*T + d2*T**2 + d3*T**3 + d4*T**4)*S \ -103 + (e0 + e1*T + e2*T**2)*S3_2)*P \ -104 + ((f0 + f1*T + f2*T**2 + f3*T**3)*S \ -105 + g0*S3_2)*P**2 \ -106 + ((h0 + h1*T + h2*T**2)*S + j1*T*S3_2)*P**3 -107 -108 -109 return CP0 + CP1 + CP2 -
110 -111 # -------------------------------------------------------------- -112 -
113 -def adtgrad(S, T, P=0): -
114 """Compute adiabatic temperature gradient -115 -116 Usage: adtgrad(S, T, [P]) -117 -118 Input: -119 S = Salinity, [PSS-78] -120 T = Temperature, [°C] -121 P = Pressure, [dbar] -122 P is optional, with a default value = zero -123 -124 Output: -125 Adiabatic temperature gradient, [K/dbar] -126 -127 Algorithm: UNESCO 1983 -128 -129 """ -130 -131 a0 = 3.5803e-5 -132 a1 = +8.5258e-6 -133 a2 = -6.836e-8 -134 a3 = 6.6228e-10 -135 -136 b0 = +1.8932e-6 -137 b1 = -4.2393e-8 -138 -139 c0 = +1.8741e-8 -140 c1 = -6.7795e-10 -141 c2 = +8.733e-12 -142 c3 = -5.4481e-14 -143 -144 d0 = -1.1351e-10 -145 d1 = 2.7759e-12 -146 -147 e0 = -4.6206e-13 -148 e1 = +1.8676e-14 -149 e2 = -2.1687e-16 -150 -151 return a0 + (a1 + (a2 + a3*T)*T)*T \ -152 + (b0 + b1*T)*(S-35) \ -153 + ( (c0 + (c1 + (c2 + c3*T)*T)*T) \ -154 + (d0 + d1*T)*(S-35) )*P \ -155 + (e0 + (e1 + e2*T)*T )*P*P -
156 -157 # --------------------------------------------------------------- -158 -
159 -def temppot(S, T, P, Pref=0): -
160 """Compute potential temperature -161 -162 Usage: temppot(S, T, P, [Pref]) -163 -164 Input: -165 S = Salinity, [PSS-78] -166 T = Temperature, [°C] -167 P = Pressure, [dbar] -168 Pref = Reference pressure, [dbar] -169 Pref is optional, with a default value = zero -170 -171 Output: -172 Potential temperature, [°C] -173 -174 Algorithm: UNESCO 1983 -175 -176 """ -177 -178 H = Pref-P -179 XK = H*adtgrad(S,T,P) -180 -181 T = T + 0.5*XK -182 Q = XK -183 P = P + 0.5*H -184 XK = H*adtgrad(S,T,P) -185 -186 T = T + 0.29289322*(XK-Q) -187 Q = 0.58578644*XK + 0.121320344*Q -188 XK = H*adtgrad(S,T,P) -189 -190 T = T + 1.707106781*(XK-Q) -191 Q = 3.414213562*XK - 4.121320344*Q -192 P = P + 0.5*H -193 XK = H*adtgrad(S,T,P) -194 -195 return T + (XK-2.0*Q)/6.0 -
196 -197 -198 # ------------------------------------------------------ -199 -
200 -def temppot0(S,T,P): -
201 """Compute potential temperature relative to surface -202 -203 Usage: temppot0(S, T, P) -204 -205 Input: -206 S = Salinity, [PSS-78] -207 T = Temperature, [°C] -208 P = Pressure, [dbar] -209 -210 Output: -211 Potential temperature, [°C] -212 -213 Algorithm: Bryden 1973 -214 -215 Note: Due to different algorithms, -216 temppot0(S, T, P) != tempot(S, T, P, Pref=0) -217 -218 """ -219 -220 P = P/10 # Conversion from dbar -221 -222 a0 = 3.6504e-4 -223 a1 = 8.3198e-5 -224 a2 = -5.4065e-7 -225 a3 = 4.0274e-9 -226 -227 b0 = 1.7439e-5 -228 b1 = -2.9778e-7 -229 -230 c0 = 8.9309e-7 -231 c1 = -3.1628e-8 -232 c2 = 2.1987e-10 -233 -234 d0 = 4.1057e-9 -235 -236 e0 = -1.6056e-10 -237 e1 = 5.0484e-12 -238 -239 S0 = S - 35.0 -240 -241 return T - (a0 + (a1 + (a2 + a3*T)*T)*T)*P \ -242 - (b0 + b1*T)*P*S0 \ -243 - (c0 + (c1 + c2*T)*T)*P*P \ -244 + d0*S0*P*P \ -245 - (e0 + e1*T)*P*P*P -
246 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
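A short sketch of the two potential-temperature routines above (module path assumed to be pyroms_toolbox.seawater.heat; input values are illustrative):

    from pyroms_toolbox.seawater import heat

    S, T, P = 35.0, 5.0, 4000.0
    theta = heat.temppot(S, T, P)    # UNESCO 1983, referenced to Pref=0 by default
    theta0 = heat.temppot0(S, T, P)  # Bryden 1973, always referenced to the surface
    # as the docstrings note, the two values differ slightly because the algorithms differ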
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-module.html
deleted file mode 100644
index 208c76f..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-module.html
+++ /dev/null
@@ -1,308 +0,0 @@

Module misc

source code

-

Miscellaneous sea water functions

-

freezept(S[, P]) -- Freezing point soundvel(S, T[, P]) -- Sound - velocity depth(P, lat) -- Depth from pressure

-

Bjørn Ådlandsvik, <bjorn@imr.no> 07 November 2004

- - - - - - - - - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
freezept(S, - P=0)
- Compute freezing temperature of sea water
- source code - -
- -
-   - - - - - - -
soundvel(S, - T, - P=0)
- Compute velocity of sound
- source code - -
- -
-   - - - - - - -
depth(P, - lat)
- Compute depth from pressure and latitude
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

freezept(S, - P=0) -

-
source code  -
- -
-Compute freezing temperature of sea water
-
-Usage: freezept(S, [P])
-
-Input:               
-    S = Salinity,      [psu]
-    P = Pressure,      [dbar]
-P is optional, with a default value = 0
-
-Output:
-    T = Freezing point,   [°C]
-
-Algorithm: UNESCO 1983 
-
-
-
-
-
-
- -
- -
- - -
-

soundvel(S, - T, - P=0) -

-
source code  -
- -
-Compute velocity of sound
-
-Usage: soundvel(S, T, [P])
-
-Input:
-    S = Salinity,     [PSS-78]
-    T = Temperature,  [°C]
-    P = Pressure,     [dbar]
-P is optional, with a default value = zero
-
-Output:
-    Sound velocity,  [m/s]
-
-Algorithm: UNESCO 1983 
-
-
-
-
-
-
- -
- -
- - -
-

depth(P, - lat) -

-
source code  -
- -
-Compute depth from pressure and latitude
-
-Usage: depth(P, lat)
-
-Input:
-    P = Pressure,     [dbar]
-    lat = Latitude    [deg]
-
-Output:
-    Depth             [m]
-
-Algorithm: UNESCO 1983 
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-pysrc.html
deleted file mode 100644
index b5f2cf2..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.misc-pysrc.html
+++ /dev/null
@@ -1,287 +0,0 @@

Source Code for Module pyroms_toolbox.seawater.misc

-
-  1  # --- encoding: iso-8859-1 --- 
-  2   
-  3  """Miscellaneous sea water functions 
-  4   
-  5  freezept(S[, P])    -- Freezing point 
-  6  soundvel(S, T[, P]) -- Sound velocity 
-  7  depth(P, lat)       -- Depth from pressure 
-  8   
-  9  Bjørn Ådlandsvik, <bjorn@imr.no>  07 November 2004 
- 10   
- 11  """ 
- 12   
- 13  # ---------------------------------------------------------- 
- 14   
-
15 -def freezept(S, P=0): -
16 - 17 """Compute freezing temperature of sea water - 18 - 19 Usage: freezept(S, [P]) - 20 - 21 Input: - 22 S = Salinity, [psu] - 23 P = Pressure, [dbar] - 24 P is optional, with a default value = 0 - 25 - 26 Output: - 27 T = Freezing point, [°C] - 28 - 29 Algorithm: UNESCO 1983 - 30 - 31 """ - 32 - 33 a0 = -0.0575 - 34 a1 = 1.710523e-3 - 35 a2 = -2.154996e-4 - 36 b = -7.53e-4 - 37 - 38 Tf = a0*S + a1*S**1.5 + a2*S**2 + b*P - 39 return Tf -
40 - 41 # ---------------------------------------------------------------- - 42 -
43 -def soundvel(S, T, P=0): -
44 """Compute velocity of sound - 45 - 46 Usage: soundvel(S, T, [P]) - 47 - 48 Input: - 49 S = Salinity, [PSS-78] - 50 T = Temperature, [°C] - 51 P = Pressure, [dbar] - 52 P is optional, with a default value = zero - 53 - 54 Output: - 55 Sound velocity, [m/s] - 56 - 57 Algorithm: UNESCO 1983 - 58 - 59 """ - 60 - 61 P = 0.1*P # Conversion to bar - 62 - 63 c00 = 1402.388 - 64 c01 = 5.03711 - 65 c02 = -5.80852e-2 - 66 c03 = 3.3420e-4 - 67 c04 = -1.47800e-6 - 68 c05 = 3.1464e-9 - 69 - 70 c10 = 0.153563 - 71 c11 = 6.8982e-4 - 72 c12 = -8.1788e-6 - 73 c13 = 1.3621e-7 - 74 c14 = -6.1185e-10 - 75 - 76 c20 = 3.1260e-5 - 77 c21 = -1.7107e-6 - 78 c22 = 2.5974e-8 - 79 c23 = -2.5335e-10 - 80 c24 = 1.0405e-12 - 81 - 82 c30 = -9.7729e-9 - 83 c31 = 3.8504e-10 - 84 c32 = -2.3643e-12 - 85 - 86 P2 = P*P - 87 P3 = P2*P - 88 Cw = c00 + (c01 + (c02 + (c03 + (c04 + c05*T)*T)*T)*T)*T \ - 89 + (c10 + (c11 + (c12 + (c13 + c14*T)*T)*T)*T)*P \ - 90 + (c20 + (c21 + (c22 + (c23 + c24*T)*T)*T)*T)*P2 \ - 91 + (c30 + (c31 + c32*T)*T)*P3 - 92 - 93 a00 = 1.389 - 94 a01 = -1.262e-2 - 95 a02 = 7.164e-5 - 96 a03 = 2.006e-6 - 97 a04 = -3.21e-8 - 98 - 99 a10 = 9.4742e-5 -100 a11 = -1.2580e-5 -101 a12 = -6.4885e-8 -102 a13 = 1.0507e-8 -103 a14 = -2.0122e-10 -104 -105 a20 = -3.9064e-7 -106 a21 = 9.1041e-9 -107 a22 = -1.6002e-10 -108 a23 = 7.988e-12 -109 -110 a30 = 1.100e-10 -111 a31 = 6.649e-12 -112 a32 = -3.389e-13 -113 -114 A = a00 + (a01 + (a02 + (a03 + a04*T)*T)*T)*T \ -115 + (a10 + (a11 + (a12 + (a13 + a14*T)*T)*T)*T)*P \ -116 + (a20 + (a21 + (a22 + a23*T)*T)*T)*P2 \ -117 + (a30 + (a31 + a32*T)*T)*P3 -118 -119 b00 = -1.922e-2 -120 b01 = -4.42e-5 -121 b10 = 7.3637e-5 -122 b11 = 1.7945e-7 -123 -124 B = b00 + b01*T + (b10 + b11*T)*P -125 -126 d00 = 1.727e-3 -127 d10 = -7.9836e-6 -128 -129 D = d00 + d10*P -130 -131 return Cw + A*S + B*S**1.5 + D*S**2 -
132 -133 # ---------------------------------------------------------------- -134 -
135 -def depth(P, lat): -
136 """Compute depth from pressure and latitude -137 -138 Usage: depth(P, lat) -139 -140 Input: -141 P = Pressure, [dbar] -142 lat = Latitude [deg] -143 -144 Output: -145 Depth [m] -146 -147 Algorithm: UNESCO 1983 -148 -149 """ -150 -151 # Use Numeric for trigonometry if present -152 try: -153 from Numeric import sin, pi -154 except: -155 from math import sin, pi -156 -157 a1 = 9.72659 -158 a2 = -2.2512e-5 -159 a3 = 2.279e-10 -160 a4 = -1.82e-15 -161 -162 b = 1.092e-6 -163 -164 g0 = 9.780318 -165 g1 = 5.2788e-3 -166 g2 = 2.36e-5 -167 -168 rad = pi / 180. -169 -170 X = sin(lat*rad) -171 X = X*X -172 grav = g0 * (1.0 + (g1 + g2*X)*X) + b*P -173 nom = (a1 + (a2 + (a3 + a4*P)*P)*P)*P -174 -175 return nom / grav -
176 -
-
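For example, depth() and freezept() can be checked against the UNESCO values quoted in the test module further below (a sketch, with the module path assumed to be pyroms_toolbox.seawater.misc):

    from pyroms_toolbox.seawater import misc

    z = misc.depth(10000.0, 30.0)    # depth [m] at P = 10000 dbar, latitude 30 deg
    print(z)                         # UNESCO 1983 check value: 9712.653 m
    Tf = misc.freezept(40.0, 500.0)  # freezing point [degC]; check value -2.588567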
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-module.html
deleted file mode 100644
index df5ea2e..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-module.html
+++ /dev/null
@@ -1,364 +0,0 @@

Module salinity

source code

-
-Seawater salinity module, providing salt and cond functions
-
-S = salt(R, T[, P]) -- Salinity    
-R = cond(S, T[, P]) -- Conductivity ratio
-
-Arguments:
-    R = Conductivity ratio
-    S = Salinity
-    T = Temperature         [°C]
-    P = Pressure,           [dbar = 10**4 Pa]
-
-Bjørn Ådlandsvik <bjorn@imr.no>, 07 November 2004
-
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
_sal(XR, - XT) - source code - -
- -
-   - - - - - - -
_dsal(XR, - XT) - source code - -
- -
-   - - - - - - -
_rt(T) - source code - -
- -
-   - - - - - - -
_c(P) - source code - -
- -
-   - - - - - - -
_b(T) - source code - -
- -
-   - - - - - - -
_a(T) - source code - -
- -
-   - - - - - - -
salt(R, - T, - P)
- Compute salinity from conductivity, temperature, and pressure
- source code - -
- -
-   - - - - - - -
cond(S, - T, - P)
- Compute conductivity ratio from salinity, temperature, and pressure
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

salt(R, - T, - P) -

-
source code  -
- -
-Compute salinity from conductivity, temperature, and pressure
-
-Usage: salt(R, T, [P])
-
-Input:
-    R = Conductivity ratio
-    T = Temperature        [°C]
-    P = Pressure,          [dbar = 10**4 Pa]
-P is optional, with default value zero
-  
-Output:
-    S = Salinity           [PSS-78]
-
-
-
-
-
-
- -
- -
- - -
-

cond(S, - T, - P) -

-
source code  -
- -
-Compute conductivity ratio from salinity, temperature, and pressure
-
-Usage: cond(S, T, [P])
-
-Input:
-    S = Salinity      [PSS-78]
-    T = Temperature   [°C]
-    P = Pressure,     [dbar = 10**4 Pa]
-P is optional, with default value zero
-  
-Output:
-    R = Conductivity ratio
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-pysrc.html
deleted file mode 100644
index 6d92ea7..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.salinity-pysrc.html
+++ /dev/null
@@ -1,283 +0,0 @@

Source Code for Module pyroms_toolbox.seawater.salinity

-
-  1  # --- encoding: iso-8859-1 --- 
-  2   
-  3  """Seawater salinity module, providing salt and cond functions 
-  4   
-  5  S = salt(R, T[, P]) -- Salinity     
-  6  R = cond(S, T[, P]) -- Conductivity ratio 
-  7   
-  8  Arguments: 
-  9      R = Conductivity ratio 
- 10      S = Salinity 
- 11      T = Temperature         [°C] 
- 12      P = Pressure,           [dbar = 10**4 Pa] 
- 13   
- 14  Bjørn Ådlandsvik <bjorn@imr.no>, 07 November 2004 
- 15   
- 16  """ 
- 17   
- 18  # ------------------------------------------------------- 
- 19   
-
20 -def _sal(XR,XT): -
21 - 22 a0 = 0.0080 - 23 a1 = -0.1692 - 24 a2 = 25.3851 - 25 a3 = 14.0941 - 26 a4 = -7.0261 - 27 a5 = 2.7081 - 28 - 29 b0 = 0.0005 - 30 b1 = -0.0056 - 31 b2 = -0.0066 - 32 b3 = -0.0375 - 33 b4 = 0.0636 - 34 b5 = -0.0144 - 35 - 36 k = 0.0162 - 37 - 38 DS = (XT / (1+k*XT) ) * \ - 39 (b0 + (b1 + (b2 + (b3 + (b4 + b5*XR)*XR)*XR)*XR)*XR) - 40 - 41 return a0 + (a1 + (a2 + (a3 + (a4 + a5*XR)*XR)*XR)*XR)*XR + DS -
42 - 43 # --------------------------------------------------- - 44 -
45 -def _dsal(XR,XT): -
46 - 47 a1 = -0.1692 - 48 a2 = 25.3851 - 49 a3 = 14.0941 - 50 a4 = -7.0261 - 51 a5 = 2.7081 - 52 - 53 b1 = -0.0056 - 54 b2 = -0.0066 - 55 b3 = -0.0375 - 56 b4 = 0.0636 - 57 b5 = -0.0144 - 58 - 59 k = 0.0162 - 60 - 61 dDS = (XT / (1+k*XT) ) * \ - 62 (b1 + (b2*2 + (b3*3 + (b4*4 + b5*5*XR)*XR)*XR)*XR) - 63 - 64 return a1 + (a2*2 + (a3*3 + (a4*4 + a5*5*XR)*XR)*XR)*XR + dDS -
65 - 66 # --------------------------------------------- - 67 -
68 -def _rt(T): -
69 - 70 c0 = 0.6766097 - 71 c1 = 2.00564e-2 - 72 c2 = 1.104259e-4 - 73 c3 = -6.9698e-7 - 74 c4 = 1.0031e-9 - 75 - 76 return c0 + (c1 + (c2 + (c3 + c4*T)*T)*T)*T -
77 - 78 # --------------------------------------------------- - 79 -
80 -def _c(P): -
81 - 82 e1 = 2.070e-5 - 83 e2 = -6.370e-10 - 84 e3 = 3.989e-15 - 85 - 86 return (e1 + (e2 + e3*P)*P)*P -
87 - 88 # --------------------------------------------------- - 89 -
90 -def _b(T): -
91 - 92 d1 = 3.426e-2 - 93 d2 = 4.464e-4 - 94 - 95 return 1.0 + (d1 + d2*T)*T -
96 - 97 # --------------------------------------------------- - 98 -
99 -def _a(T): -
100 -101 d3 = 4.215e-1 -102 d4 = -3.107e-3 -103 -104 return d3 + d4*T -
105 -106 # -------------------------------------------------- -107 -
108 -def salt(R, T, P): -
109 """Compute salinity from conductivity, temperature, and pressure -110 -111 Usage: salt(R, T, [P]) -112 -113 Input: -114 R = Conductivity ratio -115 T = Temperature [°C] -116 P = Pressure, [dbar = 10**4 Pa] -117 P is optional, with default value zero -118 -119 Output: -120 S = Salinity [PSS-78] -121 -122 """ -123 -124 DT = T - 15.0 -125 RT = R/(_rt(T)*(1.0 + _c(P)/(_b(T) + _a(T)*R))) -126 RT = abs(RT)**0.5 -127 -128 return _sal(RT,DT) -
129 -130 # ------------------------------------------------- -131 -
132 -def cond(S, T, P): -
133 """Compute conductivity ratio from salinity, temperature, and pressure -134 -135 Usage: cond(S, T, [P]) -136 -137 Input: -138 S = Salinity [PSS-78] -139 T = Temperature [°C] -140 P = Pressure, [dbar = 10**4 Pa] -141 P is optional, with default value zero -142 -143 Output: -144 R = Conductivity ratio -145 -146 """ -147 -148 DT = T-15.0 -149 RT = (S/35.0)**0.5 -150 SI = _sal(RT,DT) -151 # Iteration -152 for n in xrange(100): -153 RT = RT + (S-SI)/_dsal(RT,DT) -154 SI = _sal(RT,DT) -155 try: -156 DELS = max(abs(SI-S)) -157 except TypeError: # Not sequence, i.e. scalar S -158 DELS = abs(SI-S) -159 if (DELS < 1.0E-4): -160 break -161 -162 RTT = _rt(T)*RT*RT -163 AT = _a(T) -164 BT = _b(T) -165 CP = _c(P) -166 CP = RTT*(CP + BT) -167 BT = BT - RTT*AT -168 -169 R = abs(BT*BT + 4.0*AT*CP)**0.5 - BT -170 -171 return 0.5*R/AT -
172 -
-
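A round-trip sketch using the UNESCO check values quoted in the test module below (module path assumed to be pyroms_toolbox.seawater.salinity). Note that cond() in the listing above still uses the Python 2 xrange builtin, and that salt() and cond() take P as a required argument even though their docstrings describe it as optional, so some light porting may be needed:

    from pyroms_toolbox.seawater import salinity

    R = salinity.cond(40.0, 40.0, 10000.0)  # conductivity ratio; check value 1.888091
    S = salinity.salt(R, 40.0, 10000.0)     # recovers the salinity; check value 40.0000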
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-module.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-module.html
deleted file mode 100644
index 7dc7202..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-module.html
+++ /dev/null
@@ -1,105 +0,0 @@

Module test

source code

diff --git a/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-pysrc.html
deleted file mode 100644
index 57f16b8..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.seawater.test-pysrc.html
+++ /dev/null
@@ -1,202 +0,0 @@

Source Code for Module pyroms_toolbox.seawater.test

-
- 1                                         
- 2  ### TEST-CODE ##################################### 
- 3   
- 4  if __name__ == '__main__': 
- 5   
- 6      format1 = "Computed: %25s = " 
- 7      format2 = "Check value                         = " 
- 8       
- 9      print 
-10      # Check value from UNESCO 1983, p. 20 
-11      print "Checking svan" 
-12      print 
-13      print "S = 40, T = 40 °C, P = 10000 dbar" 
-14      print format1 % "svan(40, 40, 10000)", svan(40, 40, 10000) 
-15      print format2, "981.30210E-8" 
-16   
-17      print 
-18      # Check value from UNESCO 1983, p. 20 
-19      print "Checking sigma" 
-20      print 
-21      print "S = 40, T = 40 °C, P = 10000 dbar" 
-22      print format1 % "sigma(40, 40, 10000)", sigma(40, 40, 10000) 
-23      print format2, 59.82037 
-24   
-25      print 
-26      # Check value from UNESCO 1983, p. 11 
-27      print "Checking salt" 
-28      print 
-29      print "Salinity = 40.0000" 
-30      print "cond = 1.888091, T = 40 °C, P = 10000 dbar" 
-31      print format1 % "salt(1.888091, 40, 10000)", salt(1.888091, 40, 10000) 
-32      print format2, 40.0000 
-33   
-34      print 
-35      # Check value from UNESCO 1983, p. 11 
-36      print "Checking cond" 
-37      print 
-38      print "S = 40, T = 40 °C, P = 10000 dbar" 
-39      print format1 % "cond(40, 40, 10000)", cond(40, 40, 10000) 
-40      print format2, 1.888091 
-41   
-42      print 
-43      # Check value from UNESCO 1983, p. 35 
-44      print "Checking heatcap" 
-45      print 
-46      print "S = 40, T = 40 °C, P = 10000 dbar" 
-47      print format1 % "heatcap(40, 40, 10000)", heatcap(40, 40, 10000) 
-48      print format2, "3849.500"  
-49   
-50      print 
-51      # Check value from UNESCO 1983, p. 36 
-52      print "Checking adtgrad" 
-53      print 
-54      print "S = 40, T = 40 °C, P = 10000 dbar" 
-55      print format1 % "adtgrad(40, 40, 10000)", adtgrad(40, 40, 10000) 
-56      print format2, "3.255976E-4" 
-57   
-58      print 
-59      # Check value from UNESCO 1983, p. 44 
-60      print "Checking temppot" 
-61      print  
-62      print "S = 40, T = 40 °C, P = 10000 dbar, Pref = 0" 
-63      print format1 % "temppot(40, 40, 10000)", temppot(40, 40, 10000) 
-64      print format2, 36.89073 
-65   
-66      print 
-67      # Check value from UNESCO 1983, p. 30 
-68      print "Checking freezept" 
-69      print 
-70      print "S = 40, p = 500 dbar" 
-71      print format1 % "freezept(40, 500)", freezept(40, 500) 
-72      print format2, -2.588567 
-73   
-74      print 
-75      # Check value from UNESCO 1983, p. 49 
-76      print "Checking soundvel" 
-77      print  
-78      print "S = 40, T = 40 °C,  P = 10000 dbar" 
-79      print format1 % "soundvel(40, 40, 10000)", soundvel(40, 40, 10000) 
-80      print format2, 1731.995 
-81   
-82      print 
-83      # Check value from UNESCO 1983, p. 28 
-84      print "Checking depth" 
-85      print 
-86      print "P = 10000 dbar, latitude = 30 degrees" 
-87      print format1 % "depth(10000, 30)", depth(10000, 30) 
-88      print format2, 9712.653 
-89   
-90  # ---------------------------------------------------------------- 
-91   
-
-
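The test block above is written with Python 2 print statements; an individual check rewritten for Python 3 would look like this (assuming svan and sigma import from the seawater package as in its __init__):

    from pyroms_toolbox.seawater import svan, sigma

    # UNESCO 1983 check values for S = 40, T = 40 degC, P = 10000 dbar
    print("svan(40, 40, 10000)  =", svan(40, 40, 10000))   # expect 981.30210E-8
    print("sigma(40, 40, 10000) =", sigma(40, 40, 10000))  # expect 59.82037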
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-module.html b/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-module.html
deleted file mode 100644
index f27b750..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-module.html
+++ /dev/null
@@ -1,256 +0,0 @@

Module shapiro_filter

source code

- - - - - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
shapiro1(Finp, - order, - scheme=1)
- This function applies a 1D shapiro filter to input 1D field.
- source code - -
- -
-   - - - - - - -
shapiro2(Finp, - order, - scheme=1, - napp=1)
- This function applies a 2D shapiro filter to input 2D field.
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

shapiro1(Finp, - order, - scheme=1) -

-
source code  -
- -
-
-This function applies a 1D shapiro filter to input 1D field.
-(ripped from Hernan G. Arango shapiro1.m)
-
- On Input:
-
-     Finp        Field be filtered (1D array).
-     order       Order of the Shapiro filter (2,4,8,16,...).
-     scheme      Switch indicating the type of boundary scheme to use:
-                     scheme = 1  =>  No change at wall, constant order.
-                     scheme = 2  =>  Smoothing at wall, constant order.
-                     scheme = 3  =>  No change at wall, reduced order.
-                     scheme = 4  =>  Smoothing at wall, reduced order.
-                     scheme = 5  =>  Periodic, constant order.
-
-
-
-
-
-
- -
- -
- - -
-

shapiro2(Finp, - order, - scheme=1, - napp=1) -

-
source code  -
- -
-
-This function applies a 2D shapiro filter to input 2D field.
-(ripped from Hernan G. Arango shapiro2.m)
-
- On Input:
-
-     Finp        Field be filtered (2D array).
-     order       Order of the Shapiro filter (2,4,8,16,...).
-     scheme      Switch indicating the type of boundary scheme to use:
-                     scheme = 1  =>  No change at wall, constant order.
-                     scheme = 2  =>  Smoothing at wall, constant order.
-                     scheme = 3  =>  No change at wall, reduced order.
-                     scheme = 4  =>  Smoothing at wall, reduced order.
-                     scheme = 5  =>  Periodic, constant order.
-     napp        Number of Shapiro filter applications (optional). 
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-pysrc.html
deleted file mode 100644
index 6f23f9f..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.shapiro_filter-pysrc.html
+++ /dev/null
@@ -1,257 +0,0 @@

Source Code for Module pyroms_toolbox.shapiro_filter

-
-
1 -def shapiro1(Finp,order,scheme=1): -
2 ''' - 3 This function applies a 1D shapiro filter to input 1D field. - 4 (ripped from Hernan G. Arango shapiro1.m) - 5 - 6 On Input: - 7 - 8 Finp Field be filtered (1D array). - 9 order Order of the Shapiro filter (2,4,8,16,...). - 10 scheme Switch indicating the type of boundary scheme to use: - 11 scheme = 1 => No change at wall, constant order. - 12 scheme = 2 => Smoothing at wall, constant order. - 13 scheme = 3 => No change at wall, reduced order. - 14 scheme = 4 => Smoothing at wall, reduced order. - 15 scheme = 5 => Periodic, constant order. - 16 ''' - 17 - 18 fourk=np.array([2.500000e-1, 6.250000e-2, 1.562500e-2, 3.906250e-3, \ - 19 9.765625e-4, 2.44140625e-4, 6.103515625e-5, 1.5258789063e-5, \ - 20 3.814697e-6, 9.536743e-7, 2.384186e-7, 5.960464e-8, \ - 21 1.490116e-8, 3.725290e-9, 9.313226e-10, 2.328306e-10, \ - 22 5.820766e-11, 1.455192e-11, 3.637979e-12, 9.094947e-13]) - 23 - 24 - 25 Im = Finp.shape[0] - 26 order2 = np.int(np.floor(order/2)) - 27 - 28 cor=np.zeros((Im)) - 29 Fcor=np.zeros((Im)) - 30 - 31 #Compute filter correction. - 32 - 33 if (scheme == 1): - 34 #Scheme 1: constant order and no change at wall. - 35 for n in range(order2): - 36 if n != order2: - 37 cor[0] = 2. * (Finp[0] - Finp[1]) - 38 cor[Im-1] = 2. * (Finp[Im-1] - Finp[Im-2]) - 39 else: - 40 cor[0] = 0. - 41 cor[Im-1] = 0. - 42 - 43 cor[1:-1] = 2. * Finp[1:-1] - Finp[0:-2] - Finp[2:] - 44 - 45 Fcor= cor * fourk[order2-1] - 46 - 47 elif scheme == 2: - 48 #Scheme 2: constant order, smoothed at edges. - 49 for n in range(order2): - 50 cor[0] = 2. * (Finp[0] - Finp[1]) - 51 cor[Im-1] = 2. * (Finp[Im-1] - Finp[Im-2]) - 52 cor[1:-1] = 2. * Finp[1:-1] - Finp[0:-2] - Finp[2:] - 53 - 54 Fcor= cor * fourk[order2-1] - 55 - 56 elif scheme == 3: - 57 #Scheme 3: reduced order and no change at wall. - 58 for n in range(order2): - 59 Istr = n - 60 Iend = Im-k+1 - 61 if n == 1: - 62 cor[0] = 2. * (Finp[0] - Finp[1]) - 63 cor[Im-1] = 2. * (Finp[Im-1] - Finp[Im-2]) - 64 cor[1:-1] = 2. * Finp[1:-1] - Finp[0:-2] - Finp[2:] - 65 else: - 66 cor[Istr:Iend] = 2. * Finp[Istr:Iend] - Finp[Istr-1:Iend-1] - Finp[Istr+1:Iend+1] - 67 - 68 Fcor[Istr] = cor[Istr] * fourk[n] - 69 Fcor[Iend] = cor[Iend] * fourk[n] - 70 - 71 Fcor[0] = 0. - 72 Fcor[Istr:Iend] = cor[Istr:Iend] * fourk[order2] - 73 Fcor[Im] = 0. - 74 - 75 elif scheme == 4: - 76 #Scheme 4: reduced order, smoothed at edges. - 77 for n in range(order2): - 78 Istr = n - 79 Iend = Im-k+1 - 80 if n == 1: - 81 cor[0] = 2. * (Finp[0] - Finp[1]) - 82 cor[Im-1] = 2. * (Finp[Im-1] - Finp[Im-2]) - 83 cor[1:-1] = 2. * Finp[1:-1] - Finp[0:-2] - Finp[2:] - 84 else: - 85 cor[Istr:Iend] = 2. * Finp[Istr:Iend] - Finp[Istr-1:Iend-1] - Finp[Istr+1:Iend+1] - 86 - 87 Fcor[Istr] = cor[Istr] * fourk[n] - 88 Fcor[Iend] = cor[Iend] * fourk[n] - 89 - 90 Fcor[Istr:Iend] = cor[Istr:Iend] * fourk[order2] - 91 - 92 elif scheme == 5: - 93 #Scheme 5: constant order, periodic. - 94 for n in range(order2): - 95 cor[0] = Finp[Im-2] - 96 cor[Im-1] = Finp[1] - 97 cor[1:-1] = 2. * Finp[1:-1] - Finp[0:-2] - Finp[2:] - 98 - 99 Fcor= cor * fourk[order2-1] -100 -101 #Apply correction. -102 Fout = Finp - Fcor -103 -104 return Fout -
105 -106 -107 -
108 -def shapiro2(Finp,order,scheme=1,napp=1): -
109 ''' -110 This function applies a 2D shapiro filter to input 2D field. -111 (ripped from Hernan G. Arango shapiro2.m) -112 -113 On Input: -114 -115 Finp Field be filtered (2D array). -116 order Order of the Shapiro filter (2,4,8,16,...). -117 scheme Switch indicating the type of boundary scheme to use: -118 scheme = 1 => No change at wall, constant order. -119 scheme = 2 => Smoothing at wall, constant order. -120 scheme = 3 => No change at wall, reduced order. -121 scheme = 4 => Smoothing at wall, reduced order. -122 scheme = 5 => Periodic, constant order. -123 napp Number of Shapiro filter applications (optional). -124 ''' -125 -126 Im, Jm = Finp.shape -127 -128 F=Finp.copy() -129 Fout = np.zeros((Im, Jm)) -130 -131 for n in range(napp): -132 -133 #Filter all rows. -134 for j in range(Jm): -135 Fraw = np.squeeze(F[:,j]) -136 Fraw = Fraw.T -137 Fwrk = shapiro1(Fraw,order,scheme) -138 Fout[:,j] = Fwrk.T -139 -140 #Filter all columns. -141 for i in range(Im): -142 Fraw = np.squeeze(Fout[i,:]) -143 Fwrk = shapiro1(Fraw,order,scheme) -144 Fout[i,:] = Fwrk -145 -146 return Fout -
147 -
-
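A usage sketch for the filter above (assuming shapiro2 is importable from pyroms_toolbox.shapiro_filter and that numpy is available to the module; the toy field is illustrative):

    import numpy as np
    from pyroms_toolbox.shapiro_filter import shapiro2

    h = np.random.rand(50, 60)                      # toy 2D field to be smoothed
    h_smooth = shapiro2(h, 2)                       # 2nd-order filter, scheme=1, single pass
    h_smoother = shapiro2(h, 2, scheme=1, napp=3)   # same filter applied three times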
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.sview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.sview'-module.html
deleted file mode 100644
index a270e66..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.sview'-module.html
+++ /dev/null
@@ -1,266 +0,0 @@

Module sview'

source code

- - - - - - - - - -
- - - - - -
Functions
-
-   - - - - - - -
sview(var, - tindex, - sindex, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None)
- map = sview(var, tindex, sindex, grid, {optional switch})
- source code - -
- -
- - - - - - -
- - - - - -
Function Details
-
- -
- -
- - -
-

sview(var, - tindex, - sindex, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None) -

-
source code  -
- -

map = sview(var, tindex, sindex, grid, {optional switch})

-

optional switch:

-
    -
  • - filename if defined, load the variable from file -
  • -
  • - cmin set color minimum limit -
  • -
  • - cmax set color maximum limit -
  • -
  • - clev set the number of color step -
  • -
  • - fill use contourf instead of pcolor -
  • -
  • - contour overlay contour (request fill=True) -
  • -
  • - d contour density (default d=4) -
  • -
  • - range set axis limit -
  • -
  • - fts set font size (default: 12) -
  • -
  • - title add title to the plot -
  • -
  • - clb add colorbar (defaul: True) -
  • -
  • - pal set color map (default: cm.jet) -
  • -
  • - proj set projection type (default: merc) -
  • -
  • - fill_land fill land masked area with gray (defaul: True) -
  • -
  • - outfile if defined, write figure to file -
  • -
-

plot a constante-sigma slice of variable var. If filename is provided, - var must be a string and the variable will be load from the file. grid - can be a grid object or a gridid. In the later case, the grid object - correponding to the provided gridid will be loaded. If proj is not None, - return a Basemap object to be used with quiver for example.

-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
diff --git a/pyroms_toolbox/docs/pyroms_toolbox.sview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.sview'-pysrc.html
deleted file mode 100644
index f9327e6..0000000
--- a/pyroms_toolbox/docs/pyroms_toolbox.sview'-pysrc.html
+++ /dev/null
@@ -1,328 +0,0 @@

Source Code for Module pyroms_toolbox.sview'

-
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6  import pyroms_toolbox 
-  7   
-  8   
-
9 -def sview(var, tindex, sindex, grid, filename=None, \ - 10 cmin=None, cmax=None, clev=None, \ - 11 fill=False, contour=False, d=4, range=None, fts=None, \ - 12 title=None, clb=True, pal=None, proj='merc', \ - 13 fill_land=False, outfile=None): -
14 """ - 15 map = sview(var, tindex, sindex, grid, {optional switch}) - 16 - 17 optional switch: - 18 - filename if defined, load the variable from file - 19 - cmin set color minimum limit - 20 - cmax set color maximum limit - 21 - clev set the number of color step - 22 - fill use contourf instead of pcolor - 23 - contour overlay contour (request fill=True) - 24 - d contour density (default d=4) - 25 - range set axis limit - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - clb add colorbar (defaul: True) - 29 - pal set color map (default: cm.jet) - 30 - proj set projection type (default: merc) - 31 - fill_land fill land masked area with gray (defaul: True) - 32 - outfile if defined, write figure to file - 33 - 34 plot a constante-sigma slice of variable var. If filename is provided, - 35 var must be a string and the variable will be load from the file. - 36 grid can be a grid object or a gridid. In the later case, the grid - 37 object correponding to the provided gridid will be loaded. - 38 If proj is not None, return a Basemap object to be used with quiver - 39 for example. - 40 """ - 41 - 42 # get grid - 43 if type(grid).__name__ == 'ROMS_Grid': - 44 grd = grid - 45 else: - 46 grd = pyroms.grid.get_ROMS_grid(grid) - 47 - 48 - 49 # get variable - 50 if filename == None: - 51 var = var - 52 else: - 53 data = pyroms.io.Dataset(filename) - 54 - 55 var = data.variables[var] - 56 - 57 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 58 - 59 if tindex is not -1: - 60 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 61 K, N, M, L = var.shape - 62 else: - 63 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 64 N, M, L = var.shape - 65 - 66 # determine where on the C-grid these variable lies - 67 if N == Np and M == Mp and L == Lp: - 68 Cpos='rho' - 69 mask = grd.hgrid.mask_rho - 70 - 71 if N == Np and M == Mp and L == Lp-1: - 72 Cpos='u' - 73 mask = grd.hgrid.mask_u - 74 - 75 if N == Np and M == Mp-1 and L == Lp: - 76 Cpos='v' - 77 mask = grd.hgrid.mask_v - 78 - 79 # get constante-s slice - 80 if tindex == -1: - 81 var = var[:,:,:] - 82 else: - 83 var = var[tindex,:,:,:] - 84 - 85 if fill == True: - 86 sslice, lon, lat = pyroms.tools.sslice(var, sindex, grd, \ - 87 Cpos=Cpos) - 88 else: - 89 sslice, lon, lat = pyroms.tools.sslice(var, sindex, grd, \ - 90 Cpos=Cpos, vert=True) - 91 - 92 # plot - 93 if cmin is None: - 94 cmin = sslice.min() - 95 else: - 96 cmin = float(cmin) - 97 - 98 if cmax is None: - 99 cmax = sslice.max() -100 else: -101 cmax = float(cmax) -102 -103 if clev is None: -104 clev = 100. -105 else: -106 clev = float(clev) -107 -108 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -109 -110 if pal is None: -111 pal = cm.jet -112 else: -113 pal = pal -114 -115 if fts is None: -116 fts = 12 -117 else: -118 fts = fts -119 -120 #pal.set_over('w', 1.0) -121 #pal.set_under('w', 1.0) -122 #pal.set_bad('w', 1.0) -123 -124 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -125 -126 if range is None: -127 lon_min = lon.min() -128 lon_max = lon.max() -129 lon_0 = (lon_min + lon_max) / 2. -130 lat_min = lat.min() -131 lat_max = lat.max() -132 lat_0 = (lat_min + lat_max) / 2. -133 else: -134 lon_min = range[0] -135 lon_max = range[1] -136 lon_0 = (lon_min + lon_max) / 2. -137 lat_min = range[2] -138 lat_max = range[3] -139 lat_0 = (lat_min + lat_max) / 2. 
-140 -141 # clear figure -142 #plt.clf() -143 -144 if proj is not None: -145 map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \ -146 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -147 resolution='h', area_thresh=5.) -148 x, y = map(lon,lat) -149 -150 if fill_land is True and proj is not None: -151 # fill land and draw coastlines -152 map.drawcoastlines() -153 map.fillcontinents(color='grey') -154 else: -155 if proj is not None: -156 Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) -157 pyroms_toolbox.plot_coast_line(grd, map) -158 else: -159 plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) -160 pyroms_toolbox.plot_coast_line(grd) -161 -162 if fill is True: -163 if proj is not None: -164 cf = Basemap.contourf(map, x, y, sslice, vc, cmap = pal, \ -165 norm = pal_norm) -166 else: -167 cf = plt.contourf(lon, lat, sslice, vc, cmap = pal, \ -168 norm = pal_norm) -169 else: -170 if proj is not None: -171 cf = Basemap.pcolor(map, x, y, sslice, cmap = pal, norm = pal_norm) -172 else: -173 cf = plt.pcolor(lon, lat, sslice, cmap = pal, norm = pal_norm) -174 -175 if clb is True: -176 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -177 for t in clb.ax.get_yticklabels(): -178 t.set_fontsize(fts) -179 -180 if contour is True: -181 if fill is not True: -182 raise Warning, 'Please run again with fill=True to overlay contour.' -183 else: -184 if proj is not None: -185 Basemap.contour(map, x, y, sslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -186 else: -187 plt.contour(lon, lat, sslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -188 -189 if proj is None and range is not None: -190 plt.axis(range) -191 -192 -193 if title is not None: -194 plt.title(title, fontsize=fts+4) -195 -196 if proj is not None: -197 map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.), \ -198 labels=[0,0,0,1], fmt='%.1f') -199 map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.), \ -200 labels=[1,0,0,0], fmt='%.1f') -201 -202 if outfile is not None: -203 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ -204 outfile.find('.eps') != -1: -205 print 'Write figure to file', outfile -206 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ -207 orientation='portrait') -208 else: -209 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -210 -211 -212 if proj is None: -213 return -214 else: -215 return map -
216 -
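For orientation, here is a minimal sketch of how sview might be called. All names in it are hypothetical: 'MYGRID' stands for a gridid registered in your PYROMS_GRIDID_FILE, ocean_his.nc for a ROMS output file that actually contains a 4D 'temp' variable, and sindex=29 assumes a 30-level grid whose top sigma layer is the surface.

    import pyroms_toolbox

    # Temperature on the top sigma layer (level 29 of an assumed 30-level grid)
    # at the first time record, filled contours on the default Mercator map.
    m = pyroms_toolbox.sview('temp', 0, 29, 'MYGRID',
                             filename='ocean_his.nc',
                             fill=True, clev=50,
                             title='Surface temperature',
                             outfile='temp_surf.png')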
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.transectview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.transectview'-module.html deleted file mode 100644 index 333521b..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.transectview'-module.html +++ /dev/null @@ -1,254 +0,0 @@ - - - - - pyroms_toolbox.transectview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module transectview' - - - - - - -
-
- -

Module transectview'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
transectview(var, - tindex, - istart, - iend, - jstart, - jend, - gridid, - filename=None, - spval=1e+37, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - jrange=None, - hrange=None, - fts=None, - title=None, - map=False, - pal=None, - clb=True, - xaxis='lon', - outfile=None)
- transectview(var, tindex, istart, iend, jstart, jend, gridid, - {optional switch})
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

transectview(var, - tindex, - istart, - iend, - jstart, - jend, - gridid, - filename=None, - spval=1e+37, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - jrange=None, - hrange=None, - fts=None, - title=None, - map=False, - pal=None, - clb=True, - xaxis='lon', - outfile=None) -

-
source code  -
- -
-
-transectview(var, tindex, istart, iend, jstart, jend, gridid, 
-             {optional switch})
-
-optional switch:
-  - filename         if defined, load the variable from file
-  - spval            specify spval
-  - cmin             set color minimum limit
-  - cmax             set color maximum limit
-  - clev             set the number of color step
-  - fill             use contourf instead of pcolor
-  - contour          overlay contour (request fill=True)
-  - d                contour density (default d=4) 
-  - jrange           j range
-  - hrange           h range
-  - fts              set font size (default: 12)
-  - title            add title to the plot
-  - map              if True, draw a map showing transect location
-  - pal              set color map (default: cm.jet)
-  - clb              add colorbar (defaul: True)
-  - xaxis            use lon or lat for x axis
-  - outfile          if defined, write figure to file
-
-plot vertical transect between the points P1=(istart, jstart)
-and P2=(iend, jend) from 3D variable var. If filename is provided,
-var must be a string and the variable will be load from the file.
-grid can be a grid object or a gridid. In the later case, the grid
-object correponding to the provided gridid will be loaded.
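For orientation, a minimal sketch of a transectview call is given here. Everything in it is a placeholder: 'MYGRID' stands for a gridid registered in your PYROMS_GRIDID_FILE, ocean_his.nc for a ROMS output file that actually contains a 4D 'salt' variable, and the i/j indices are arbitrary.

    import pyroms_toolbox

    # Salinity transect from P1=(istart, jstart)=(10, 5) to P2=(iend, jend)=(10, 80)
    # at the first time record: filled contours, a line contour every 4th color
    # level, longitude on the x axis, and a small map showing the transect location.
    pyroms_toolbox.transectview('salt', 0, 10, 10, 5, 80, 'MYGRID',
                                filename='ocean_his.nc',
                                fill=True, contour=True, d=4,
                                map=True, xaxis='lon',
                                title='Salinity transect',
                                outfile='salt_transect.png')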
-
-
-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.transectview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.transectview'-pysrc.html deleted file mode 100644 index ad8fa12..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.transectview'-pysrc.html +++ /dev/null @@ -1,332 +0,0 @@ - - - - - pyroms_toolbox.transectview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module transectview' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.transectview'

-
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6   
-  7   
-
8 -def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ - 9 filename=None, spval=1e37, cmin=None, cmax=None, clev=None, \ - 10 fill=False, contour=False, d=4, jrange=None, hrange=None,\ - 11 fts=None, title=None, map=False, \ - 12 pal=None, clb=True, xaxis='lon', outfile=None): -
13 """ - 14 transectview(var, tindex, istart, iend, jstart, jend, gridid, - 15 {optional switch}) - 16 - 17 optional switch: - 18 - filename if defined, load the variable from file - 19 - spval specify spval - 20 - cmin set color minimum limit - 21 - cmax set color maximum limit - 22 - clev set the number of color step - 23 - fill use contourf instead of pcolor - 24 - contour overlay contour (request fill=True) - 25 - d contour density (default d=4) - 26 - jrange j range - 27 - hrange h range - 28 - fts set font size (default: 12) - 29 - title add title to the plot - 30 - map if True, draw a map showing transect location - 31 - pal set color map (default: cm.jet) - 32 - clb add colorbar (defaul: True) - 33 - xaxis use lon or lat for x axis - 34 - outfile if defined, write figure to file - 35 - 36 plot vertical transect between the points P1=(istart, jstart) - 37 and P2=(iend, jend) from 3D variable var. If filename is provided, - 38 var must be a string and the variable will be load from the file. - 39 grid can be a grid object or a gridid. In the later case, the grid - 40 object correponding to the provided gridid will be loaded. - 41 """ - 42 - 43 # get grid - 44 if type(gridid).__name__ == 'ROMS_Grid': - 45 grd = gridid - 46 else: - 47 grd = pyroms.grid.get_ROMS_grid(gridid) - 48 - 49 # get variable - 50 if filename == None: - 51 var = var - 52 else: - 53 data = pyroms.io.Dataset(filename) - 54 - 55 var = data.variables[var] - 56 - 57 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 58 - 59 if tindex is not -1: - 60 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 61 K, N, M, L = var.shape - 62 else: - 63 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 64 N, M, L = var.shape - 65 - 66 # determine where on the C-grid these variable lies - 67 if N == Np and M == Mp and L == Lp: - 68 Cpos='rho' - 69 lon = grd.hgrid.lon_vert - 70 lat = grd.hgrid.lat_vert - 71 mask = grd.hgrid.mask_rho - 72 - 73 if N == Np and M == Mp and L == Lp-1: - 74 Cpos='u' - 75 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 76 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 77 mask = grd.hgrid.mask_u - 78 - 79 if N == Np and M == Mp-1 and L == Lp: - 80 Cpos='v' - 81 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 82 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 83 mask = grd.hgrid.mask_v - 84 - 85 # get transect - 86 if tindex == -1: - 87 var = var[:,:,:] - 88 else: - 89 var = var[tindex,:,:,:] - 90 - 91 if fill == True: - 92 transect, zt, lont, latt, = pyroms.tools.transect(var, istart, iend, \ - 93 jstart, jend, grd, Cpos, spval=spval) - 94 else: - 95 transect, zt, lont, latt, = pyroms.tools.transect(var, istart, iend, \ - 96 jstart, jend, grd, Cpos, vert=True, spval=spval) - 97 - 98 if xaxis == 'lon': - 99 xt = lont -100 elif xaxis == 'lat': -101 xt = latt -102 -103 # plot -104 if cmin is None: -105 cmin = transect.min() -106 else: -107 cmin = float(cmin) -108 -109 if cmax is None: -110 cmax = transect.max() -111 else: -112 cmax = float(cmax) -113 -114 if clev is None: -115 clev = 100. 
-116 else: -117 clev = float(clev) -118 -119 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -120 -121 if pal is None: -122 pal = cm.jet -123 else: -124 pal = pal -125 -126 if fts is None: -127 fts = 12 -128 else: -129 fts = fts -130 -131 #pal.set_over('w', 1.0) -132 #pal.set_under('w', 1.0) -133 #pal.set_bad('w', 1.0) -134 -135 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -136 -137 # clear figure -138 #plt.clf() -139 -140 if map is True: -141 # set axes for the main plot in order to keep space for the map -142 if fts < 12: -143 ax=None -144 else: -145 ax = plt.axes([0.15, 0.08, 0.8, 0.65]) -146 else: -147 if fts < 12: -148 ax=None -149 else: -150 ax=plt.axes([0.15, 0.1, 0.8, 0.8]) -151 -152 -153 if fill is True: -154 cf = plt.contourf(xt, zt, transect, vc, cmap = pal, norm = pal_norm, axes=ax) -155 else: -156 cf = plt.pcolor(xt, zt, transect, cmap = pal, norm = pal_norm, axes=ax) -157 -158 if clb is True: -159 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -160 for t in clb.ax.get_yticklabels(): -161 t.set_fontsize(fts) -162 -163 if contour is True: -164 if fill is not True: -165 raise Warning, 'Please run again with fill=True for overlay contour.' -166 else: -167 plt.contour(xt, zt, transect, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) -168 -169 -170 if jrange is not None: -171 plt.xlim(jrange) -172 -173 if hrange is not None: -174 plt.ylim(hrange) -175 -176 if title is not None: -177 if map is True: -178 # move the title on the right -179 xmin, xmax = ax.get_xlim() -180 ymin, ymax = ax.get_ylim() -181 xt = xmin - (xmax-xmin)/9. -182 yt = ymax + (ymax-ymin)/7. -183 plt.text(xt, yt, title, fontsize=fts+4) -184 else: -185 plt.title(title, fontsize=fts+4) -186 -187 plt.xlabel('Latitude', fontsize=fts) -188 plt.ylabel('Depth', fontsize=fts) -189 -190 if map is True: -191 # draw a map with constant-i slice location -192 ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) -193 varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) -194 lon_min = lon.min() -195 lon_max = lon.max() -196 lon_0 = (lon_min + lon_max) / 2. -197 lat_min = lat.min() -198 lat_max = lat.max() -199 lat_0 = (lat_min + lat_max) / 2. -200 map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ -201 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -202 resolution='i', area_thresh=10.) -203 x, y = map(lon,lat) -204 xt, yt = map(lont[0,:],latt[0,:]) -205 # fill land and draw coastlines -206 map.drawcoastlines() -207 map.fillcontinents(color='grey') -208 #map.drawmapboundary() -209 Basemap.pcolor(map, x, y, varm, axes=ax_map) -210 Basemap.plot(map, xt, yt, 'k-', linewidth=3, axes=ax_map) -211 -212 -213 if outfile is not None: -214 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: -215 print 'Write figure to file', outfile -216 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') -217 else: -218 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -219 -220 -221 return -
222 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-module.html deleted file mode 100644 index 030685a..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-module.html +++ /dev/null @@ -1,264 +0,0 @@ - - - - - pyroms_toolbox.twoDview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module twoDview' - - - - - - -
-
- -

Module twoDview'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
twoDview(var, - tindex, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None)
- map = twoDview(var, tindex, grid, {optional switch})
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

twoDview(var, - tindex, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None) -

-
source code  -
- -

map = twoDview(var, tindex, grid, {optional switch})

-

optional switch:

-
    -
  • - filename if defined, load the variable from file -
  • -
  • - cmin set color minimum limit -
  • -
  • - cmax set color maximum limit -
  • -
  • - clev set the number of color step -
  • -
  • - fill use contourf instead of pcolor -
  • -
  • - contour overlay contour (request fill=True) -
  • -
  • - d contour density (default d=4) -
  • -
  • - range set axis limit -
  • -
  • - fts set font size (default: 12) -
  • -
  • - title add title to the plot -
  • -
  • - clb add colorbar (defaul: True) -
  • -
  • - pal set color map (default: cm.jet) -
  • -
  • - proj set projection type (default: merc) -
  • -
  • - fill_land fill land masked area with gray (defaul: True) -
  • -
  • - outfile if defined, write figure to file -
  • -
-

plot 2-dimensions variable var. If filename is provided, var must be a - string and the variable will be load from the file. grid can be a grid - object or a gridid. In the later case, the grid object correponding to - the provided gridid will be loaded. If proj is not None, return a Basemap - object to be used with quiver for example.
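As a quick orientation, a minimal twoDview sketch follows; 'MYGRID' and ocean_his.nc are hypothetical placeholders (a gridid from your PYROMS_GRIDID_FILE and a ROMS file holding the 2D+time variable 'zeta').

    import pyroms_toolbox

    # Free-surface height at the first time record on the default Mercator map;
    # keep the returned Basemap object so velocity arrows can be added later
    # with quiver on the same projection.
    m = pyroms_toolbox.twoDview('zeta', 0, 'MYGRID',
                                filename='ocean_his.nc',
                                cmin=-1.0, cmax=1.0, clev=50,
                                fill=True, fill_land=True,
                                title='Sea surface height',
                                outfile='zeta.png')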

-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-pysrc.html deleted file mode 100644 index 20f665f..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.twoDview'-pysrc.html +++ /dev/null @@ -1,343 +0,0 @@ - - - - - pyroms_toolbox.twoDview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module twoDview' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.twoDview'

-
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6  import pyroms_toolbox 
-  7   
-  8   
-
9 -def twoDview(var, tindex, grid, filename=None, \ - 10 cmin=None, cmax=None, clev=None, fill=False, \ - 11 contour=False, d=4, range=None, fts=None, \ - 12 title=None, clb=True, pal=None, proj='merc', \ - 13 fill_land=False, outfile=None): -
14 """ - 15 map = twoDview(var, tindex, grid, {optional switch}) - 16 - 17 optional switch: - 18 - filename if defined, load the variable from file - 19 - cmin set color minimum limit - 20 - cmax set color maximum limit - 21 - clev set the number of color step - 22 - fill use contourf instead of pcolor - 23 - contour overlay contour (request fill=True) - 24 - d contour density (default d=4) - 25 - range set axis limit - 26 - fts set font size (default: 12) - 27 - title add title to the plot - 28 - clb add colorbar (defaul: True) - 29 - pal set color map (default: cm.jet) - 30 - proj set projection type (default: merc) - 31 - fill_land fill land masked area with gray (defaul: True) - 32 - outfile if defined, write figure to file - 33 - 34 plot 2-dimensions variable var. If filename is provided, - 35 var must be a string and the variable will be load from the file. - 36 grid can be a grid object or a gridid. In the later case, the grid - 37 object correponding to the provided gridid will be loaded. - 38 If proj is not None, return a Basemap object to be used with quiver - 39 for example. - 40 """ - 41 - 42 # get grid - 43 if type(grid).__name__ == 'ROMS_Grid': - 44 grd = grid - 45 else: - 46 grd = pyroms.grid.get_ROMS_grid(grid) - 47 - 48 - 49 # get variable - 50 if filename == None: - 51 var = var - 52 else: - 53 data = pyroms.io.Dataset(filename) - 54 - 55 var = data.variables[var] - 56 - 57 - 58 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 59 - 60 if tindex is not -1: - 61 assert len(var.shape) == 3, 'var must be 3D (time plus space).' - 62 K, M, L = var.shape - 63 else: - 64 assert len(var.shape) == 2, 'var must be 2D (no time dependency).' - 65 M, L = var.shape - 66 - 67 # determine where on the C-grid these variable lies - 68 if M == Mp and L == Lp: - 69 Cpos='rho' - 70 if fill == True: - 71 lon = grd.hgrid.lon_rho - 72 lat = grd.hgrid.lat_rho - 73 else: - 74 lon = grd.hgrid.lon_vert - 75 lat = grd.hgrid.lat_vert - 76 mask = grd.hgrid.mask_rho - 77 - 78 if M == Mp and L == Lp-1: - 79 Cpos='u' - 80 if fill == True: - 81 lon = grd.hgrid.lon_u - 82 lat = grd.hgrid.lat_u - 83 else: - 84 lon = 0.5 * (grd.hgrid.lon_vert[:,:-1] + grd.hgrid.lon_vert[:,1:]) - 85 lat = 0.5 * (grd.hgrid.lat_vert[:,:-1] + grd.hgrid.lat_vert[:,1:]) - 86 mask = grd.hgrid.mask_u - 87 - 88 if M == Mp-1 and L == Lp: - 89 Cpos='v' - 90 if fill == True: - 91 lon = grd.hgrid.lon_v - 92 lat = grd.hgrid.lat_v - 93 else: - 94 lon = 0.5 * (grd.hgrid.lon_vert[:-1,:] + grd.hgrid.lon_vert[1:,:]) - 95 lat = 0.5 * (grd.hgrid.lat_vert[:-1,:] + grd.hgrid.lat_vert[1:,:]) - 96 mask = grd.hgrid.mask_v - 97 - 98 # get 2D var - 99 if tindex == -1: -100 var = var[:,:] -101 else: -102 var = var[tindex,:,:] -103 -104 # mask -105 var = np.ma.masked_where(mask == 0, var) -106 -107 # plot -108 if cmin is None: -109 cmin = var.min() -110 else: -111 cmin = float(cmin) -112 -113 if cmax is None: -114 cmax = var.max() -115 else: -116 cmax = float(cmax) -117 -118 if clev is None: -119 clev = 100. -120 else: -121 clev = float(clev) -122 -123 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -124 -125 if pal is None: -126 pal = cm.jet -127 else: -128 pal = pal -129 -130 if fts is None: -131 fts = 12 -132 else: -133 fts = fts -134 -135 #pal.set_over('w', 1.0) -136 #pal.set_under('w', 1.0) -137 #pal.set_bad('w', 1.0) -138 -139 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -140 -141 if range is None: -142 lon_min = lon.min() -143 lon_max = lon.max() -144 lon_0 = (lon_min + lon_max) / 2. 
-145 lat_min = lat.min() -146 lat_max = lat.max() -147 lat_0 = (lat_min + lat_max) / 2. -148 else: -149 lon_min = range[0] -150 lon_max = range[1] -151 lon_0 = (lon_min + lon_max) / 2. -152 lat_min = range[2] -153 lat_max = range[3] -154 lat_0 = (lat_min + lat_max) / 2. -155 -156 # clear figure -157 #plt.clf() -158 -159 if proj is not None: -160 map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \ -161 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -162 resolution='h', area_thresh=5.) -163 x, y = map(lon,lat) -164 -165 if fill_land is True and proj is not None: -166 # fill land and draw coastlines -167 map.drawcoastlines() -168 map.fillcontinents(color='grey') -169 else: -170 if proj is not None: -171 Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) -172 pyroms_toolbox.plot_coast_line(grd, map) -173 else: -174 plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) -175 pyroms_toolbox.plot_coast_line(grd) -176 -177 if fill is True: -178 if proj is not None: -179 cf = Basemap.contourf(map, x, y, var, vc, cmap = pal, \ -180 norm = pal_norm) -181 else: -182 cf = plt.contourf(lon, lat, var, vc, cmap = pal, \ -183 norm = pal_norm) -184 else: -185 if proj is not None: -186 cf = Basemap.pcolor(map, x, y, var, cmap = pal, norm = pal_norm) -187 else: -188 cf = plt.pcolor(lon, lat, var, cmap = pal, norm = pal_norm) -189 -190 if clb is True: -191 clb = plt.colorbar(cf, fraction=0.075,format='%.2f') -192 for t in clb.ax.get_yticklabels(): -193 t.set_fontsize(fts) -194 -195 if contour is True: -196 if fill is not True: -197 raise Warning, 'Please run again with fill=True to overlay contour.' -198 else: -199 if proj is not None: -200 Basemap.contour(map, x, y, var, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -201 else: -202 plt.contour(lon, lat, var, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -203 -204 if proj is None and range is not None: -205 plt.axis(range) -206 -207 -208 if title is not None: -209 plt.title(title, fontsize=fts+4) -210 -211 if proj is not None: -212 map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.), \ -213 labels=[0,0,0,1], fmt='%.1f') -214 map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.), \ -215 labels=[1,0,0,0], fmt='%.1f') -216 -217 if outfile is not None: -218 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ -219 outfile.find('.eps') != -1: -220 print 'Write figure to file', outfile -221 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ -222 orientation='portrait') -223 else: -224 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -225 -226 -227 if proj is None: -228 return -229 else: -230 return map -
231 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.zview'-module.html b/pyroms_toolbox/docs/pyroms_toolbox.zview'-module.html deleted file mode 100644 index 3a26d43..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.zview'-module.html +++ /dev/null @@ -1,265 +0,0 @@ - - - - - pyroms_toolbox.zview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module zview' - - - - - - -
-
- -

Module zview'

source code

- - - - - - - - - -
- - - - - -
Functions[hide private]
-
-   - - - - - - -
zview(var, - tindex, - depth, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - clb_format='%.2f', - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None)
- map = zview(var, tindex, depth, grid, {optional switch})
- source code - -
- -
- - - - - - -
- - - - - -
Function Details[hide private]
-
- -
- -
- - -
-

zview(var, - tindex, - depth, - grid, - filename=None, - cmin=None, - cmax=None, - clev=None, - clb_format='%.2f', - fill=False, - contour=False, - d=4, - range=None, - fts=None, - title=None, - clb=True, - pal=None, - proj='merc', - fill_land=False, - outfile=None) -

-
source code  -
- -

map = zview(var, tindex, depth, grid, {optional switch})

-

optional switch:

-
    -
  • - filename if defined, load the variable from file -
  • -
  • - cmin set color minimum limit -
  • -
  • - cmax set color maximum limit -
  • -
  • - clev set the number of color step -
  • -
  • - fill use contourf instead of pcolor -
  • -
  • - contour overlay contour (request fill=True) -
  • -
  • - range set axis limit -
  • -
  • - fts set font size (default: 12) -
  • -
  • - title add title to the plot -
  • -
  • - clb add colorbar (defaul: True) -
  • -
  • - pal set color map (default: cm.jet) -
  • -
  • - proj set projection type (default: merc) -
  • -
  • - fill_land fill land masked area with gray (defaul: True) -
  • -
  • - outfile if defined, write figure to file -
  • -
-

plot a constante-z slice of variable var. If filename is provided, var - must be a string and the variable will be load from the file. grid can be - a grid object or a gridid. In the later case, the grid object - correponding to the provided gridid will be loaded. If proj is not None, - return a Basemap object to be used with quiver for example.
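A minimal zview sketch; 'MYGRID' is a hypothetical gridid from your PYROMS_GRIDID_FILE and ocean_his.nc a hypothetical ROMS file containing a 4D 'temp' variable. The depth is given in meters and the sign is handled internally.

    import pyroms_toolbox

    # Temperature on the 100 m level: filled contours, a contour overlay every
    # 4th color level, and one-decimal colorbar labels.
    m = pyroms_toolbox.zview('temp', 0, 100, 'MYGRID',
                             filename='ocean_his.nc',
                             cmin=2, cmax=14, clev=60, clb_format='%.1f',
                             fill=True, contour=True, d=4,
                             title='Temperature at 100 m',
                             outfile='temp_100m.png')

    # With the default proj='merc' a Basemap instance comes back, so the same
    # map can be reused, e.g. for a quiver overlay.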

-
-
-
-
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/pyroms_toolbox.zview'-pysrc.html b/pyroms_toolbox/docs/pyroms_toolbox.zview'-pysrc.html deleted file mode 100644 index 385e644..0000000 --- a/pyroms_toolbox/docs/pyroms_toolbox.zview'-pysrc.html +++ /dev/null @@ -1,328 +0,0 @@ - - - - - pyroms_toolbox.zview' - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - Package pyroms_toolbox :: - Module zview' - - - - - - -
-
-

Source Code for Module pyroms_toolbox.zview'

-
-  1  import numpy as np 
-  2  import matplotlib.pyplot as plt 
-  3  from  matplotlib import cm, colors 
-  4  from mpl_toolkits.basemap import Basemap 
-  5  import pyroms 
-  6  import pyroms_toolbox 
-  7   
-  8   
-
9 -def zview(var, tindex, depth, grid, filename=None, \ - 10 cmin=None, cmax=None, clev=None, clb_format='%.2f', \ - 11 fill=False, contour=False, d=4, range=None, fts=None, \ - 12 title=None, clb=True, pal=None, proj='merc', \ - 13 fill_land=False, outfile=None): -
14 """ - 15 map = zview(var, tindex, depth, grid, {optional switch}) - 16 - 17 optional switch: - 18 - filename if defined, load the variable from file - 19 - cmin set color minimum limit - 20 - cmax set color maximum limit - 21 - clev set the number of color step - 22 - fill use contourf instead of pcolor - 23 - contour overlay contour (request fill=True) - 24 - range set axis limit - 25 - fts set font size (default: 12) - 26 - title add title to the plot - 27 - clb add colorbar (defaul: True) - 28 - pal set color map (default: cm.jet) - 29 - proj set projection type (default: merc) - 30 - fill_land fill land masked area with gray (defaul: True) - 31 - outfile if defined, write figure to file - 32 - 33 plot a constante-z slice of variable var. If filename is provided, - 34 var must be a string and the variable will be load from the file. - 35 grid can be a grid object or a gridid. In the later case, the grid - 36 object correponding to the provided gridid will be loaded. - 37 If proj is not None, return a Basemap object to be used with quiver - 38 for example. - 39 """ - 40 - 41 # get grid - 42 if type(grid).__name__ == 'ROMS_Grid': - 43 grd = grid - 44 else: - 45 grd = pyroms.grid.get_ROMS_grid(grid) - 46 - 47 - 48 # get variable - 49 if filename == None: - 50 var = var - 51 else: - 52 data = pyroms.io.Dataset(filename) - 53 - 54 var = data.variables[var] - 55 - 56 Np, Mp, Lp = grd.vgrid.z_r[0,:].shape - 57 - 58 if tindex is not -1: - 59 assert len(var.shape) == 4, 'var must be 4D (time plus space).' - 60 K, N, M, L = var.shape - 61 else: - 62 assert len(var.shape) == 3, 'var must be 3D (no time dependency).' - 63 N, M, L = var.shape - 64 - 65 # determine where on the C-grid these variable lies - 66 if N == Np and M == Mp and L == Lp: - 67 Cpos='rho' - 68 mask = grd.hgrid.mask_rho - 69 - 70 if N == Np and M == Mp and L == Lp-1: - 71 Cpos='u' - 72 mask = grd.hgrid.mask_u - 73 - 74 if N == Np and M == Mp-1 and L == Lp: - 75 Cpos='v' - 76 mask = grd.hgrid.mask_v - 77 - 78 # get constante-z slice - 79 if tindex == -1: - 80 var = var[:,:,:] - 81 else: - 82 var = var[tindex,:,:,:] - 83 - 84 depth = -abs(depth) - 85 if fill == True: - 86 zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \ - 87 Cpos=Cpos) - 88 else: - 89 zslice, lon, lat = pyroms.tools.zslice(var, depth, grd, \ - 90 Cpos=Cpos, vert=True) - 91 - 92 # plot - 93 if cmin is None: - 94 cmin = zslice.min() - 95 else: - 96 cmin = float(cmin) - 97 - 98 if cmax is None: - 99 cmax = zslice.max() -100 else: -101 cmax = float(cmax) -102 -103 if clev is None: -104 clev = 100. -105 else: -106 clev = float(clev) -107 -108 dc = (cmax - cmin)/clev ; vc = np.arange(cmin,cmax+dc,dc) -109 -110 if pal is None: -111 pal = cm.jet -112 else: -113 pal = pal -114 -115 if fts is None: -116 fts = 12 -117 else: -118 fts = fts -119 -120 #pal.set_over('w', 1.0) -121 #pal.set_under('w', 1.0) -122 #pal.set_bad('w', 1.0) -123 -124 pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) -125 -126 if range is None: -127 lon_min = lon.min() -128 lon_max = lon.max() -129 lon_0 = (lon_min + lon_max) / 2. -130 lat_min = lat.min() -131 lat_max = lat.max() -132 lat_0 = (lat_min + lat_max) / 2. -133 else: -134 lon_min = range[0] -135 lon_max = range[1] -136 lon_0 = (lon_min + lon_max) / 2. -137 lat_min = range[2] -138 lat_max = range[3] -139 lat_0 = (lat_min + lat_max) / 2. 
-140 -141 # clear figure -142 #plt.clf() -143 -144 if proj is not None: -145 map = Basemap(projection=proj, llcrnrlon=lon_min, llcrnrlat=lat_min, \ -146 urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ -147 resolution='h', area_thresh=5.) -148 x, y = map(lon,lat) -149 -150 if fill_land is True and proj is not None: -151 # fill land and draw coastlines -152 map.drawcoastlines() -153 map.fillcontinents(color='grey') -154 else: -155 if proj is not None: -156 Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) -157 pyroms_toolbox.plot_coast_line(grd, map) -158 else: -159 plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) -160 pyroms_toolbox.plot_coast_line(grd) -161 -162 if fill is True: -163 if proj is not None: -164 cf = Basemap.contourf(map, x, y, zslice, vc, cmap = pal, \ -165 norm = pal_norm) -166 else: -167 cf = plt.contourf(lon, lat, zslice, vc, cmap = pal, \ -168 norm = pal_norm) -169 else: -170 if proj is not None: -171 cf = Basemap.pcolor(map, x, y, zslice, cmap = pal, norm = pal_norm) -172 else: -173 cf = plt.pcolor(lon, lat, zslice, cmap = pal, norm = pal_norm) -174 -175 if clb is True: -176 clb = plt.colorbar(cf, fraction=0.075,format=clb_format) -177 for t in clb.ax.get_yticklabels(): -178 t.set_fontsize(fts) -179 -180 if contour is True: -181 if fill is not True: -182 raise Warning, 'Please run again with fill=True to overlay contour.' -183 else: -184 if proj is not None: -185 Basemap.contour(map, x, y, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -186 else: -187 plt.contour(lon, lat, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') -188 -189 if proj is None and range is not None: -190 plt.axis(range) -191 -192 -193 if title is not None: -194 plt.title(title, fontsize=fts+4) -195 -196 if proj is not None: -197 map.drawmeridians(np.arange(lon_min,lon_max, (lon_max-lon_min)/5.001), \ -198 labels=[0,0,0,1], fmt='%.1f') -199 map.drawparallels(np.arange(lat_min,lat_max, (lat_max-lat_min)/5.001), \ -200 labels=[1,0,0,0], fmt='%.1f') -201 -202 if outfile is not None: -203 if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ -204 outfile.find('.eps') != -1: -205 print 'Write figure to file', outfile -206 plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ -207 orientation='portrait') -208 else: -209 print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' -210 -211 -212 if proj is None: -213 return -214 else: -215 return map -
216 -
-
- - - - - - - - - - - - - - - - - - - - - - - -
- - - - diff --git a/pyroms_toolbox/docs/redirect.html b/pyroms_toolbox/docs/redirect.html deleted file mode 100644 index 210dcd2..0000000 --- a/pyroms_toolbox/docs/redirect.html +++ /dev/null @@ -1,38 +0,0 @@ -Epydoc Redirect Page - - - - - - - - -

Epydoc Auto-redirect page

- -

-When javascript is enabled, this page will redirect URLs of
-the form redirect.html#dotted.name to the
-documentation for the object with the given fully-qualified
-dotted name.

-

 

- - - - - diff --git a/pyroms_toolbox/docs/toc-everything.html b/pyroms_toolbox/docs/toc-everything.html deleted file mode 100644 index ae6421d..0000000 --- a/pyroms_toolbox/docs/toc-everything.html +++ /dev/null @@ -1,251 +0,0 @@ - - - - - Everything - - - - - -

Everything

-
-

All Classes

- pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'.BGrid_GFDL
pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.BGrid_GFDL
pyroms_toolbox.average'.avg_obj
pyroms_toolbox.seawater.OutOfRangeError

All Functions

- pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.get_nc_BGrid_GFDL
pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full.make_remap_BGrid_GFDL_file
pyroms_toolbox.BGrid_GFDL.flood'.flood
pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'.get_Bgrid_proj
pyroms_toolbox.BGrid_GFDL.get_coast_line'.get_coast_line
pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'.get_nc_BGrid_GFDL
pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'.make_remap_grid_file
pyroms_toolbox.BGrid_GFDL.plot_coast_line'.plot_coast_line
pyroms_toolbox.N2'.N2
pyroms_toolbox.O2_saturation'.O2_saturation
pyroms_toolbox.average'.average
pyroms_toolbox.change'.change
pyroms_toolbox.get_coast_line'.get_coast_line
pyroms_toolbox.isoview'.isoview
pyroms_toolbox.iview'.iview
pyroms_toolbox.jview'.jview
pyroms_toolbox.latview'.latview
pyroms_toolbox.lonview'.lonview
pyroms_toolbox.lsq_phase_amplitude'.lsq_phase_amplitude
pyroms_toolbox.nc_create_roms_bdry_file'.nc_create_roms_bdry_file
pyroms_toolbox.nc_create_roms_file'.nc_create_roms_file
pyroms_toolbox.plot_coast_line'.plot_coast_line
pyroms_toolbox.plot_mask'.plot_mask
pyroms_toolbox.quiver'.quiver
pyroms_toolbox.remapping'.remapping
pyroms_toolbox.rfactor'.rfactor
pyroms_toolbox.rvalue'.rvalue
- - pyroms_toolbox.seawater.density.alpha
pyroms_toolbox.seawater.density.beta
pyroms_toolbox.seawater.density.dens
pyroms_toolbox.seawater.density.drhods
pyroms_toolbox.seawater.density.drhodt
pyroms_toolbox.seawater.density.sigma
pyroms_toolbox.seawater.density.svan
pyroms_toolbox.seawater.heat.adtgrad
pyroms_toolbox.seawater.heat.heatcap
pyroms_toolbox.seawater.heat.temppot
pyroms_toolbox.seawater.heat.temppot0
pyroms_toolbox.seawater.misc.depth
pyroms_toolbox.seawater.misc.freezept
pyroms_toolbox.seawater.misc.soundvel
- - - - - - pyroms_toolbox.seawater.salinity.cond
pyroms_toolbox.seawater.salinity.salt
pyroms_toolbox.shapiro_filter.shapiro1
pyroms_toolbox.shapiro_filter.shapiro2
pyroms_toolbox.sview'.sview
pyroms_toolbox.transectview'.transectview
pyroms_toolbox.twoDview'.twoDview
pyroms_toolbox.zview'.zview

All Variables

- - - pyroms_toolbox.average'.avg
pyroms_toolbox.quiver'.ALLOW_THREADS
pyroms_toolbox.quiver'.BUFSIZE
pyroms_toolbox.quiver'.CLIP
pyroms_toolbox.quiver'.ERR_CALL
pyroms_toolbox.quiver'.ERR_DEFAULT
pyroms_toolbox.quiver'.ERR_DEFAULT2
pyroms_toolbox.quiver'.ERR_IGNORE
pyroms_toolbox.quiver'.ERR_LOG
pyroms_toolbox.quiver'.ERR_PRINT
pyroms_toolbox.quiver'.ERR_RAISE
pyroms_toolbox.quiver'.ERR_WARN
pyroms_toolbox.quiver'.FLOATING_POINT_SUPPORT
pyroms_toolbox.quiver'.FPE_DIVIDEBYZERO
pyroms_toolbox.quiver'.FPE_INVALID
pyroms_toolbox.quiver'.FPE_OVERFLOW
pyroms_toolbox.quiver'.FPE_UNDERFLOW
pyroms_toolbox.quiver'.False_
pyroms_toolbox.quiver'.Inf
pyroms_toolbox.quiver'.Infinity
pyroms_toolbox.quiver'.MAXDIMS
pyroms_toolbox.quiver'.NAN
pyroms_toolbox.quiver'.NINF
pyroms_toolbox.quiver'.NZERO
pyroms_toolbox.quiver'.NaN
pyroms_toolbox.quiver'.PINF
pyroms_toolbox.quiver'.PZERO
pyroms_toolbox.quiver'.RAISE
pyroms_toolbox.quiver'.SHIFT_DIVIDEBYZERO
pyroms_toolbox.quiver'.SHIFT_INVALID
pyroms_toolbox.quiver'.SHIFT_OVERFLOW
pyroms_toolbox.quiver'.SHIFT_UNDERFLOW
pyroms_toolbox.quiver'.ScalarType
pyroms_toolbox.quiver'.True_
pyroms_toolbox.quiver'.UFUNC_BUFSIZE_DEFAULT
pyroms_toolbox.quiver'.UFUNC_PYVALS_NAME
pyroms_toolbox.quiver'.WRAP
pyroms_toolbox.quiver'.absolute
pyroms_toolbox.quiver'.add
pyroms_toolbox.quiver'.arccos
pyroms_toolbox.quiver'.arccosh
pyroms_toolbox.quiver'.arcsin
pyroms_toolbox.quiver'.arcsinh
pyroms_toolbox.quiver'.arctan
pyroms_toolbox.quiver'.arctan2
pyroms_toolbox.quiver'.arctanh
pyroms_toolbox.quiver'.bitwise_and
pyroms_toolbox.quiver'.bitwise_not
pyroms_toolbox.quiver'.bitwise_or
pyroms_toolbox.quiver'.bitwise_xor
pyroms_toolbox.quiver'.c_
pyroms_toolbox.quiver'.cast
pyroms_toolbox.quiver'.ceil
pyroms_toolbox.quiver'.conj
pyroms_toolbox.quiver'.conjugate
pyroms_toolbox.quiver'.cos
pyroms_toolbox.quiver'.cosh
pyroms_toolbox.quiver'.deg2rad
pyroms_toolbox.quiver'.degrees
pyroms_toolbox.quiver'.divide
pyroms_toolbox.quiver'.e
pyroms_toolbox.quiver'.equal
pyroms_toolbox.quiver'.exp
pyroms_toolbox.quiver'.exp2
pyroms_toolbox.quiver'.expm1
pyroms_toolbox.quiver'.fabs
pyroms_toolbox.quiver'.floor
pyroms_toolbox.quiver'.floor_divide
pyroms_toolbox.quiver'.fmax
pyroms_toolbox.quiver'.fmin
pyroms_toolbox.quiver'.fmod
pyroms_toolbox.quiver'.frexp
pyroms_toolbox.quiver'.greater
pyroms_toolbox.quiver'.greater_equal
pyroms_toolbox.quiver'.hypot
pyroms_toolbox.quiver'.index_exp
pyroms_toolbox.quiver'.inf
pyroms_toolbox.quiver'.infty
pyroms_toolbox.quiver'.invert
pyroms_toolbox.quiver'.isfinite
pyroms_toolbox.quiver'.isinf
pyroms_toolbox.quiver'.isnan
pyroms_toolbox.quiver'.ldexp
pyroms_toolbox.quiver'.left_shift
pyroms_toolbox.quiver'.less
pyroms_toolbox.quiver'.less_equal
pyroms_toolbox.quiver'.little_endian
pyroms_toolbox.quiver'.log
pyroms_toolbox.quiver'.log10
pyroms_toolbox.quiver'.log1p
pyroms_toolbox.quiver'.logaddexp
pyroms_toolbox.quiver'.logaddexp2
pyroms_toolbox.quiver'.logical_and
pyroms_toolbox.quiver'.logical_not
pyroms_toolbox.quiver'.logical_or
pyroms_toolbox.quiver'.logical_xor
pyroms_toolbox.quiver'.maximum
pyroms_toolbox.quiver'.mgrid
pyroms_toolbox.quiver'.minimum
pyroms_toolbox.quiver'.mod
pyroms_toolbox.quiver'.modf
pyroms_toolbox.quiver'.multiply
pyroms_toolbox.quiver'.nan
pyroms_toolbox.quiver'.nbytes
pyroms_toolbox.quiver'.negative
pyroms_toolbox.quiver'.newaxis
pyroms_toolbox.quiver'.not_equal
pyroms_toolbox.quiver'.ogrid
pyroms_toolbox.quiver'.ones_like
pyroms_toolbox.quiver'.pi
pyroms_toolbox.quiver'.power
pyroms_toolbox.quiver'.r_
pyroms_toolbox.quiver'.rad2deg
pyroms_toolbox.quiver'.radians
pyroms_toolbox.quiver'.reciprocal
pyroms_toolbox.quiver'.remainder
pyroms_toolbox.quiver'.right_shift
pyroms_toolbox.quiver'.rint
pyroms_toolbox.quiver'.s_
pyroms_toolbox.quiver'.sctypeDict
pyroms_toolbox.quiver'.sctypeNA
pyroms_toolbox.quiver'.sctypes
pyroms_toolbox.quiver'.sign
pyroms_toolbox.quiver'.signbit
pyroms_toolbox.quiver'.sin
pyroms_toolbox.quiver'.sinh
pyroms_toolbox.quiver'.sqrt
pyroms_toolbox.quiver'.square
pyroms_toolbox.quiver'.subtract
pyroms_toolbox.quiver'.tan
pyroms_toolbox.quiver'.tanh
pyroms_toolbox.quiver'.true_divide
pyroms_toolbox.quiver'.trunc
pyroms_toolbox.quiver'.typeDict
pyroms_toolbox.quiver'.typeNA
pyroms_toolbox.quiver'.typecodes

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox-module.html deleted file mode 100644 index 977f80f..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox-module.html +++ /dev/null @@ -1,29 +0,0 @@ - - - - - pyroms_toolbox - - - - - -

Module pyroms_toolbox

-
-
-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL-module.html deleted file mode 100644 index 7571d9e..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL-module.html +++ /dev/null @@ -1,29 +0,0 @@ - - - - - BGrid_GFDL - - - - - -

Module BGrid_GFDL

-
-
-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html deleted file mode 100644 index 9081b4a..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - BGrid_GFDL' - - - - - -

Module BGrid_GFDL'

-
-

Classes

- BGrid_GFDL

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html deleted file mode 100644 index 0c73ae4..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full-module.html +++ /dev/null @@ -1,34 +0,0 @@ - - - - - BGrid_GFDL_full - - - - - -

Module BGrid_GFDL_full

-
-

Classes

- BGrid_GFDL

Functions

- get_nc_BGrid_GFDL
make_remap_BGrid_GFDL_file

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.flood'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.flood'-module.html deleted file mode 100644 index bb84d39..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.flood'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - flood' - - - - - -

Module flood'

-
-

Functions

- flood

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html deleted file mode 100644 index 1709b7e..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - get_Bgrid_proj' - - - - - -

Module get_Bgrid_proj'

-
-

Functions

- get_Bgrid_proj

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html deleted file mode 100644 index fa5d843..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_coast_line'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - get_coast_line' - - - - - -

Module get_coast_line'

-
-

Functions

- get_coast_line

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html deleted file mode 100644 index ce5b3cf..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - get_nc_BGrid_GFDL' - - - - - -

Module get_nc_BGrid_GFDL'

-
-

Functions

- get_nc_BGrid_GFDL

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html deleted file mode 100644 index fdb4f5c..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.make_remap_grid_file'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - make_remap_grid_file' - - - - - -

Module make_remap_grid_file'

-
-

Functions

- make_remap_grid_file

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html deleted file mode 100644 index a6b5067..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.BGrid_GFDL.plot_coast_line'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - plot_coast_line' - - - - - -

Module plot_coast_line'

-
-

Functions

- plot_coast_line

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.N2'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.N2'-module.html deleted file mode 100644 index 4363a42..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.N2'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - N2' - - - - - -

Module N2'

-
-

Functions

- N2

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.O2_saturation'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.O2_saturation'-module.html deleted file mode 100644 index 267d1e1..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.O2_saturation'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - O2_saturation' - - - - - -

Module O2_saturation'

-
-

Functions

- O2_saturation

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox._average-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox._average-module.html deleted file mode 100644 index b0f96ea..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox._average-module.html +++ /dev/null @@ -1,36 +0,0 @@ - - - - - _average - - - - - -

Module _average

-
-

Variables

-
- avg2d
-
- avg3d
-
-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.average'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.average'-module.html deleted file mode 100644 index 3f1e4f1..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.average'-module.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - average' - - - - - -

Module average'

-
-

Classes

- avg_obj

Functions

- average

Variables

- avg

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.change'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.change'-module.html deleted file mode 100644 index 3580647..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.change'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - change' - - - - - -

Module change'

-
-

Functions

- change

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.get_coast_line'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.get_coast_line'-module.html deleted file mode 100644 index 87ebd0d..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.get_coast_line'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - get_coast_line' - - - - - -

Module get_coast_line'

-
-

Functions

- get_coast_line

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.isoview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.isoview'-module.html deleted file mode 100644 index 60ec608..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.isoview'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - isoview' - - - - - -

Module isoview'

-
-

Functions

- isoview

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.iview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.iview'-module.html deleted file mode 100644 index 247ea3b..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.iview'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - iview' - - - - - -

Module iview'

-
-

Functions

- iview

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.jview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.jview'-module.html deleted file mode 100644 index 58f5e4a..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.jview'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - jview' - - - - - -

Module jview'

-
-

Functions

- jview

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.latview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.latview'-module.html deleted file mode 100644 index 706debb..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.latview'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - latview' - - - - - -

Module latview'

-
-

Functions

- latview

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.lonview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.lonview'-module.html deleted file mode 100644 index 9398ddd..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.lonview'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - lonview' - - - - - -

Module lonview'

-
-

Functions

- lonview

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.lsq_phase_amplitude'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.lsq_phase_amplitude'-module.html deleted file mode 100644 index 541ff8d..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.lsq_phase_amplitude'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - lsq_phase_amplitude' - - - - - -

Module lsq_phase_amplitude'

-
-

Functions

- lsq_phase_amplitude

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_bdry_file'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_bdry_file'-module.html deleted file mode 100644 index b5e5862..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_bdry_file'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - nc_create_roms_bdry_file' - - - - - -

Module nc_create_roms_bdry_file'

-
-

Functions

- nc_create_roms_bdry_file

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_file'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_file'-module.html deleted file mode 100644 index 6c9c3b8..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.nc_create_roms_file'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - nc_create_roms_file' - - - - - -

Module nc_create_roms_file'

-
-

Functions

- nc_create_roms_file

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_coast_line'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_coast_line'-module.html deleted file mode 100644 index 70a048b..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_coast_line'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - plot_coast_line' - - - - - -

Module plot_coast_line'

-
-

Functions

- plot_coast_line

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_mask'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_mask'-module.html deleted file mode 100644 index 7764b08..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.plot_mask'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - plot_mask' - - - - - -

Module plot_mask'

-
-

Functions

- plot_mask

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.quiver'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.quiver'-module.html deleted file mode 100644 index 4e2beb9..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.quiver'-module.html +++ /dev/null @@ -1,167 +0,0 @@ - - - - - quiver' - - - - - -

Module quiver'

-
-

Functions

- quiver

Variables

- ALLOW_THREADS
BUFSIZE
CLIP
ERR_CALL
ERR_DEFAULT
ERR_DEFAULT2
ERR_IGNORE
ERR_LOG
ERR_PRINT
ERR_RAISE
ERR_WARN
FLOATING_POINT_SUPPORT
FPE_DIVIDEBYZERO
FPE_INVALID
FPE_OVERFLOW
FPE_UNDERFLOW
False_
Inf
Infinity
MAXDIMS
NAN
NINF
NZERO
NaN
PINF
PZERO
RAISE
SHIFT_DIVIDEBYZERO
SHIFT_INVALID
SHIFT_OVERFLOW
SHIFT_UNDERFLOW
ScalarType
True_
UFUNC_BUFSIZE_DEFAULT
UFUNC_PYVALS_NAME
WRAP
absolute
add
arccos
arccosh
arcsin
arcsinh
arctan
arctan2
arctanh
bitwise_and
bitwise_not
bitwise_or
bitwise_xor
c_
cast
ceil
conj
conjugate
cos
cosh
deg2rad
degrees
divide
e
equal
exp
exp2
expm1
fabs
floor
floor_divide
fmax
fmin
fmod
frexp
greater
greater_equal
hypot
index_exp
inf
infty
invert
isfinite
isinf
isnan
ldexp
left_shift
less
less_equal
little_endian
log
log10
log1p
logaddexp
logaddexp2
logical_and
logical_not
logical_or
logical_xor
maximum
mgrid
minimum
mod
modf
multiply
nan
nbytes
negative
newaxis
not_equal
ogrid
ones_like
pi
power
r_
rad2deg
radians
reciprocal
remainder
right_shift
rint
s_
sctypeDict
sctypeNA
sctypes
sign
signbit
sin
sinh
sqrt
square
subtract
tan
tanh
true_divide
trunc
typeDict
typeNA
typecodes

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.remapping'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.remapping'-module.html deleted file mode 100644 index 3f11c1e..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.remapping'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - remapping' - - - - - -

Module remapping'

-
-

Functions

- remapping

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.rfactor'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.rfactor'-module.html deleted file mode 100644 index fa72dfd..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.rfactor'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - rfactor' - - - - - -

Module rfactor'

-
-

Functions

- rfactor

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.rvalue'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.rvalue'-module.html deleted file mode 100644 index fbbf158..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.rvalue'-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - rvalue' - - - - - -

Module rvalue'

-
-

Functions

- rvalue

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater-module.html deleted file mode 100644 index 5d1c171..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater-module.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - - seawater - - - - - -

Module seawater

-
-

Classes

- OutOfRangeError

-[hide private] - - - - diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.density-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.density-module.html deleted file mode 100644 index 0631e38..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.density-module.html +++ /dev/null @@ -1,43 +0,0 @@ - - - - - density - - - - - -

Module density

-
-

Functions

-
- _dens0
-
- _seck
- alpha
beta
dens
drhods
drhodt
sigma
svan

diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.heat-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.heat-module.html deleted file mode 100644 index 4d83c93..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.heat-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module heat; functions: adtgrad, heatcap, temppot, temppot0]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.misc-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.misc-module.html deleted file mode 100644 index c0e8677..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.misc-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module misc; functions: depth, freezept, soundvel]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.salinity-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.salinity-module.html deleted file mode 100644 index 426d15d..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.salinity-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module salinity; functions: _a, _b, _c, _dsal, _rt, _sal, cond, salt]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.test-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.test-module.html deleted file mode 100644 index 850ba19..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.seawater.test-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module test; no functions listed]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.shapiro_filter-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.shapiro_filter-module.html deleted file mode 100644 index 61c0a3f..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.shapiro_filter-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module shapiro_filter; functions: shapiro1, shapiro2]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.sview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.sview'-module.html deleted file mode 100644 index 1c7d77a..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.sview'-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module sview'; function: sview]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.transectview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.transectview'-module.html deleted file mode 100644 index a80bfc5..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.transectview'-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module transectview'; function: transectview]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.twoDview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.twoDview'-module.html deleted file mode 100644 index 9bd4c24..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.twoDview'-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module twoDview'; function: twoDview]
diff --git a/pyroms_toolbox/docs/toc-pyroms_toolbox.zview'-module.html b/pyroms_toolbox/docs/toc-pyroms_toolbox.zview'-module.html deleted file mode 100644 index 45a194a..0000000 --- a/pyroms_toolbox/docs/toc-pyroms_toolbox.zview'-module.html +++ /dev/null
    [deleted generated epydoc sidebar: Module zview'; function: zview]
diff --git a/pyroms_toolbox/docs/toc.html b/pyroms_toolbox/docs/toc.html deleted file mode 100644 index 2f2ad50..0000000 --- a/pyroms_toolbox/docs/toc.html +++ /dev/null
    [deleted generated epydoc Table of Contents listing the modules: pyroms_toolbox, pyroms_toolbox.BGrid_GFDL, pyroms_toolbox.BGrid_GFDL.BGrid_GFDL', pyroms_toolbox.BGrid_GFDL.BGrid_GFDL_full, pyroms_toolbox.BGrid_GFDL.flood', pyroms_toolbox.BGrid_GFDL.get_Bgrid_proj', pyroms_toolbox.BGrid_GFDL.get_coast_line', pyroms_toolbox.BGrid_GFDL.get_nc_BGrid_GFDL', pyroms_toolbox.BGrid_GFDL.make_remap_grid_file', pyroms_toolbox.BGrid_GFDL.plot_coast_line', pyroms_toolbox.N2', pyroms_toolbox.O2_saturation', pyroms_toolbox.average', pyroms_toolbox.change', pyroms_toolbox.get_coast_line', pyroms_toolbox.isoview', pyroms_toolbox.iview', pyroms_toolbox.jview', pyroms_toolbox.latview', pyroms_toolbox.lonview', pyroms_toolbox.lsq_phase_amplitude', pyroms_toolbox.nc_create_roms_bdry_file', pyroms_toolbox.nc_create_roms_file', pyroms_toolbox.plot_coast_line', pyroms_toolbox.plot_mask', pyroms_toolbox.quiver', pyroms_toolbox.remapping', pyroms_toolbox.rfactor', pyroms_toolbox.rvalue', pyroms_toolbox.seawater, pyroms_toolbox.seawater.density, pyroms_toolbox.seawater.heat, pyroms_toolbox.seawater.misc, pyroms_toolbox.seawater.salinity, pyroms_toolbox.seawater.test, pyroms_toolbox.shapiro_filter, pyroms_toolbox.sview', pyroms_toolbox.transectview', pyroms_toolbox.twoDview', pyroms_toolbox.zview']
- [hide private] - - - - diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL.py index 2f9f798..2473d69 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL.py @@ -5,7 +5,6 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF -import pyroms @@ -48,8 +47,20 @@ def __init__(self, lon_t, lat_t, lon_uv, lat_uv, \ def _calculate_grid_angle(self): geod = pyproj.Geod(ellps='WGS84') - az_forward, az_back, dx = geod.inv(self.lon_t_vert[:,:-1], self.lat_t_vert[:,:-1], \ - self.lon_t_vert[:,1:], self.lat_t_vert[:,1:]) + # This is how it used to be, but it fails for some reason. +# az_forward, az_back, dx = geod.inv(self.lon_t_vert[:,:-1], self.lat_t_vert[:,:-1], \ +# self.lon_t_vert[:,1:], self.lat_t_vert[:,1:]) +# angle = 0.5 * (az_forward[1:,:] + az_forward[:-1,:]) - angle = 0.5 * (az_forward[1:,:] + az_forward[:-1,:]) + # Seems to work... + sizey, sizex = self.lon_t_vert.shape + angle = np.zeros(self.h.shape) + + for i in range(sizex-1): + az_forward, az_back, dx = geod.inv(self.lon_t_vert[:,i], self.lat_t_vert[:,i], \ + self.lon_t_vert[:,i+1], self.lat_t_vert[:,i+1]) + + angle[:,i] = 0.5 * (az_forward[1:] + az_forward[:-1]) + + # part of original code self.angle = (90 - angle) * np.pi/180. diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL_full.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL_full.py index 11e669e..db5a90a 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL_full.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/BGrid_GFDL_full.py @@ -59,7 +59,7 @@ def _calculate_t_vert(self): lat[-1,0] = self.lat_t[-1,0] - ( self.lat_uv[-2,0] - self.lat_t[-1,0] ) self.lon_t_vert = lon - self.lat_t_vert = lat + self.lat_t_vert = lat def _calculate_uv_vert(self): @@ -93,7 +93,7 @@ def _calculate_grid_angle(self): angle = 0.5 * (az_forward[1:,:] + az_forward[:-1,:]) self.angle = (90 - angle) * np.pi/180. 
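The hunk above replaces a single vectorized geod.inv call over the 2-D vertex arrays with a column-by-column loop. A standalone sketch of that column-wise azimuth averaging, using a small synthetic vertex grid (the array values below are made up for illustration; the toolbox takes them from self.lon_t_vert and self.lat_t_vert), looks roughly like this:

    import numpy as np
    import pyproj

    # Synthetic (3, 3) vertex grid standing in for the model grid vertices.
    lon_vert = np.array([[0., 1., 2.],
                         [0., 1., 2.],
                         [0., 1., 2.]])
    lat_vert = np.array([[0., 0., 0.],
                         [1., 1., 1.],
                         [2., 2., 2.]])

    geod = pyproj.Geod(ellps='WGS84')
    sizey, sizex = lon_vert.shape
    angle = np.zeros((sizey - 1, sizex - 1))   # one value per grid cell

    for i in range(sizex - 1):
        # forward azimuth between adjacent vertex columns, one column pair at a time
        az_forward, az_back, dx = geod.inv(lon_vert[:, i], lat_vert[:, i],
                                           lon_vert[:, i + 1], lat_vert[:, i + 1])
        # average adjacent rows to move the azimuth onto cell centers
        angle[:, i] = 0.5 * (az_forward[1:] + az_forward[:-1])

    angle = (90 - angle) * np.pi / 180.   # same final rotation as in the hunk
    print(angle.shape)                    # (2, 2)

Each geod.inv call here operates on a single pair of vertex columns, which sidesteps whatever array-shape handling made the all-at-once call fail in the commented-out original.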
- + def get_nc_BGrid_GFDL(grdfile): @@ -175,7 +175,7 @@ def make_remap_BGrid_GFDL_file(Bgrd, Bpos='t'): grid_imask = Bgrd.mask_uv[0,:].flatten() Mp, Lp = Bgrd.lon_uv.shape else: - raise ValueError, 'Bpos must be t or uv' + raise ValueError('Bpos must be t or uv') grid_size = Lp * Mp diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/__init__.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/__init__.py index 77fb113..7151993 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/__init__.py @@ -2,11 +2,11 @@ BGrid_GFDL module """ -from BGrid_GFDL import BGrid_GFDL -from get_nc_BGrid_GFDL import get_nc_BGrid_GFDL -from make_remap_grid_file import make_remap_grid_file -from get_coast_line import get_coast_line -from plot_coast_line import plot_coast_line -from get_Bgrid_proj import get_Bgrid_proj -from flood import flood +from .BGrid_GFDL import BGrid_GFDL +from .get_nc_BGrid_GFDL import get_nc_BGrid_GFDL +from .make_remap_grid_file import make_remap_grid_file +from .get_coast_line import get_coast_line +from .plot_coast_line import plot_coast_line +from .get_Bgrid_proj import get_Bgrid_proj +from .flood import flood diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/flood.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/flood.py index c7acb86..ae33da7 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/flood.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/flood.py @@ -11,12 +11,12 @@ def flood(varz, Bgrdz, Bpos='t', irange=None, jrange=None, \ var = flood(var, Bgrdz) optional switch: - - Bpos='t', 'uv' specify the B-grid position where - the variable rely + - Bpos='t', 'uv' specify the B-grid position where + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -45,7 +45,7 @@ def flood(varz, Bgrdz, Bpos='t', irange=None, jrange=None, \ h = Bgrdz.h mask = Bgrdz.mask_uv[0,:,:] else: - raise Warning, '%s bad position. Bpos must be t or uv' % Bpos + raise Warning('%s bad position. 
Bpos must be t or uv' % Bpos) nlev, Mm, Lm = varz.shape @@ -103,6 +103,6 @@ def flood(varz, Bgrdz, Bpos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_coast_line.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_coast_line.py index eb6ae60..1f7d2ac 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_coast_line.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_coast_line.py @@ -22,28 +22,28 @@ def get_coast_line(Bgrd): if mask[jidx[i], iidx[i]] != mask[jidx[i]+1, iidx[i]]: lonc = ([lon[jidx[i]+1,iidx[i]], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i]+1,iidx[i]], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if jidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i]-1, iidx[i]]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i],iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i],iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != mask.shape[1]-1: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]+1]: lonc = ([lon[jidx[i],iidx[i]+1], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]+1], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]-1]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i]+1,iidx[i]]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i]+1,iidx[i]]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) return coast diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_nc_BGrid_GFDL.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_nc_BGrid_GFDL.py index 715302a..c72a7cb 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_nc_BGrid_GFDL.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_nc_BGrid_GFDL.py @@ -4,7 +4,7 @@ def get_nc_BGrid_GFDL(grdfile, name='GFDL_CM2.1_North_Pacific', area='regional', \ - xrange=(60,175), yrange=(120, 190), ystart=235)): + xrange=(60,175), yrange=(120, 190), ystart=235): """ Bgrd = get_nc_BGrid_GFDL(grdfile) @@ -37,7 +37,7 @@ def get_nc_BGrid_GFDL(grdfile, name='GFDL_CM2.1_North_Pacific', area='regional', for j in range(M_t): for i in range(L_t): try: - mask_t[0:kmt[j,i], j,i] = 1 + mask_t[0:int(kmt[j,i]), j,i] = 1 except: mask_t[:, j,i] = 0 @@ -48,22 +48,86 @@ def get_nc_BGrid_GFDL(grdfile, name='GFDL_CM2.1_North_Pacific', area='regional', for j in range(M_uv): for i in range(L_uv): try: - mask_uv[0:kmu[j,i], j,i] = 1 + mask_uv[0:int(kmu[j,i]), j,i] = 1 except: mask_uv[:, j,i] = 0 if area == 'npolar': - #add one row in the north and the south - lon_t = lon_t[np.r_[0,:len(lon_t),-1]] - lon_t[0] = lon_t[1] - (lon_t[2]-lon_t[1]) - lon_t[-1] = lon_t[-2] + (lon_t[-2]-lon_t[-3]) - lat_t = lat_t[np.r_[0,0,:len(lat_t),-1,-1]] - lat_t[0] = -85 - lat_t[1] = -80 - lat_t[-2] = 90 - lat_t[-1] = 91 - lon_uv = lon_t - lat_uv = lat_t + #add two rows in the north and the south + lon_t = lon_t[np.r_[0,0,:np.size(lon_t,0),-1,-1]] + lon_t = lon_t[:,np.r_[0,:np.size(lon_t,1),-1]] + lon_t[:,0] = lon_t[:,1] - (lon_t[:,2]-lon_t[:,1]) + lon_t[:,-1] = lon_t[:,-2] + (lon_t[:,-2]-lon_t[:,-3]) + lat_t = lat_t[np.r_[0,0,:np.size(lat_t,0),-1,-1]] + lat_t = lat_t[:,np.r_[0,:np.size(lat_t,1),-1]] + lat_t[0,:] = -85 + lat_t[1,:] = -80 + lat_t[-2,:] = 90 + lat_t[-1,:] = 91 + lon_uv = lon_uv[np.r_[0,0,:np.size(lon_uv,0),-1,-1]] + 
lon_uv = lon_uv[:,np.r_[0,:np.size(lon_uv,1),-1]] + lon_uv[:,0] = lon_uv[:,1] - (lon_uv[:,2]-lon_t[:,1]) + lon_uv[:,-1] = lon_uv[:,-2] + (lon_uv[:,-2]-lon_uv[:,-3]) + lat_uv = lat_uv[np.r_[0,0,:np.size(lat_uv,0),-1,-1]] + lat_uv = lat_uv[:,np.r_[0,:np.size(lat_uv,1),-1]] + lat_uv[0,:] = -85 + lat_uv[1,:] = -80 + lat_uv[-2,:] = 90 + lat_uv[-1,:] = 91 + mask_t = mask_t[:,np.r_[0,0,:np.size(mask_t,1),-1,-1],:] + mask_t = mask_t[:,:,np.r_[0,:np.size(mask_t,2),-1]] + mask_t[:,:,0] = mask_t[:,:,-2] + mask_t[:,:,-1] = mask_t[:,:,1] + mask_uv = mask_uv[:,np.r_[0,0,:np.size(mask_uv,1),-1,-1],:] + mask_uv = mask_uv[:,:,np.r_[0,:np.size(mask_uv,2),-1]] + mask_uv[:,:,0] = mask_uv[:,:,-2] + mask_uv[:,:,-1] = mask_uv[:,:,1] + h = h[np.r_[0,0,:np.size(h,0),-1,-1]] + h = h[:,np.r_[0,:np.size(h,1),-1]] + h[:,0] = h[:,-2] + h[:,-1] = h[:,1] + f = f[np.r_[0,0,:np.size(f,0),-1,-1]] + f = f[:,np.r_[0,:np.size(f,1),-1]] + f[:,0] = f[:,-2] + f[:,-1] = f[:,1] + m,l = h.shape + xrange=(1,l-2) + yrange=(ystart+2,m-2) + + if area == 'tripole': + #add two rows in the north and the south + fold1 = L_t//2 + lon_t = lon_t[np.r_[0,0,:np.size(lon_t,0),-1,-1]] + lon_t[-2,:fold1] = lon_t[-3,L_t:fold1-1:-1] + lon_t[-2,L_t:fold1-1:-1] = lon_t[-3,:fold1] + lon_t[-1,:fold1] = lon_t[-4,L_t:fold1-1:-1] + lon_t[-1,L_t:fold1-1:-1] = lon_t[-4,:fold1] + + lon_t = lon_t[:,np.r_[0,:np.size(lon_t,1),-1]] + lon_t[:,0] = lon_t[:,1] - (lon_t[:,2]-lon_t[:,1]) + lon_t[:,-1] = lon_t[:,-2] + (lon_t[:,-2]-lon_t[:,-3]) + lat_t = lat_t[np.r_[0,0,:np.size(lat_t,0),-1,-1]] + lat_t = lat_t[:,np.r_[0,:np.size(lat_t,1),-1]] + lat_t[0,:] = -85 + lat_t[1,:] = -80 + lat_t[-2,:] = lat_t[-3,:] + lat_t[-1,:] = lat_t[-4,:] + lon_uv = lon_uv[np.r_[0,0,:np.size(lon_uv,0),-1,-1]] + + lon_uv[-2,:fold1] = lon_uv[-4,L_t:fold1-1:-1] + lon_uv[-2,L_t:fold1-1:-1] = lon_uv[-4,:fold1] + lon_uv[-1,:fold1] = lon_uv[-5,L_t:fold1-1:-1] + lon_uv[-1,L_t:fold1-1:-1] = lon_uv[-5,:fold1] + + lon_uv = lon_uv[:,np.r_[0,:np.size(lon_uv,1),-1]] + lon_uv[:,0] = lon_uv[:,1] - (lon_uv[:,2]-lon_t[:,1]) + lon_uv[:,-1] = lon_uv[:,-2] + (lon_uv[:,-2]-lon_uv[:,-3]) + lat_uv = lat_uv[np.r_[0,0,:np.size(lat_uv,0),-1,-1]] + lat_uv = lat_uv[:,np.r_[0,:np.size(lat_uv,1),-1]] + lat_uv[0,:] = -85 + lat_uv[1,:] = -80 + lat_uv[-2,:] = lat_uv[-3,:] + lat_uv[-1,:] = lat_uv[-4,:] mask_t = mask_t[:,np.r_[0,0,:np.size(mask_t,1),-1,-1],:] mask_t = mask_t[:,:,np.r_[0,:np.size(mask_t,2),-1]] mask_t[:,:,0] = mask_t[:,:,-2] diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/make_remap_grid_file.py b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/make_remap_grid_file.py index ef56fe5..e574730 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/make_remap_grid_file.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/make_remap_grid_file.py @@ -33,7 +33,7 @@ def make_remap_grid_file(Bgrd, Bpos='t'): grid_imask = Bgrd.mask_uv[0,:].flatten() Mp, Lp = Bgrd.lon_uv.shape else: - raise ValueError, 'Bpos must be t or uv' + raise ValueError('Bpos must be t or uv') grid_size = Lp * Mp diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_POP/__init__.py b/pyroms_toolbox/pyroms_toolbox/BGrid_POP/__init__.py index 5279852..21bfe6a 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_POP/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_POP/__init__.py @@ -2,7 +2,7 @@ POP module """ -from BGrid_POP import BGrid_POP -from get_nc_BGrid_POP import get_nc_BGrid_POP -from make_remap_grid_file import make_remap_grid_file -from flood import flood +from .BGrid_POP import BGrid_POP +from .get_nc_BGrid_POP import get_nc_BGrid_POP +from 
.make_remap_grid_file import make_remap_grid_file +from .flood import flood diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_POP/flood.py b/pyroms_toolbox/pyroms_toolbox/BGrid_POP/flood.py index 9f7ba56..134a669 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_POP/flood.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_POP/flood.py @@ -12,11 +12,11 @@ def flood(varz, Bgrd, Bpos='t', irange=None, jrange=None, \ optional switch: - Bpos='t', 'u' specify the grid position where - the variable rely + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -101,6 +101,6 @@ def flood(varz, Bgrd, Bpos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/BGrid_SODA.py b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/BGrid_SODA.py index 02f564b..54e3746 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/BGrid_SODA.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/BGrid_SODA.py @@ -5,7 +5,7 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF - import pyroms +import pyroms class BGrid_SODA(object): """ diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/__init__.py b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/__init__.py index 5e710a7..0291486 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/__init__.py @@ -2,7 +2,7 @@ SODA module """ -from BGrid_SODA import BGrid_SODA -from get_nc_BGrid_SODA import get_nc_BGrid_SODA -from make_remap_grid_file import make_remap_grid_file -from flood import flood +from .BGrid_SODA import BGrid_SODA +from .get_nc_BGrid_SODA import get_nc_BGrid_SODA +from .make_remap_grid_file import make_remap_grid_file +from .flood import flood diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/flood.py b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/flood.py index 853623f..c56986f 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/flood.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/flood.py @@ -12,11 +12,11 @@ def flood(varz, Bgrd, Bpos='t', irange=None, jrange=None, \ optional switch: - Bpos='t', 'uv' specify the grid position where - the variable rely + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -98,6 +98,6 @@ def flood(varz, Bgrd, Bpos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/get_nc_BGrid_SODA.py b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/get_nc_BGrid_SODA.py index 4ce3fa2..74bf368 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/get_nc_BGrid_SODA.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/get_nc_BGrid_SODA.py @@ -42,7 +42,7 @@ def get_nc_BGrid_SODA(grdfile, name='SODA_2.1.6_CORAL', area='regional', \ for i in range(mask_t[0,:].shape[1]): for j in 
range(mask_t[0,:].shape[0]): if mask_t[0,j,i] == 1: - h[j,i] = depth_bnds[bottom[j,i]] + h[j,i] = depth_bnds[int(bottom[j,i])] if area == 'global': #add one row in the north and the south diff --git a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/make_remap_grid_file.py b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/make_remap_grid_file.py index f568124..1496275 100644 --- a/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/make_remap_grid_file.py +++ b/pyroms_toolbox/pyroms_toolbox/BGrid_SODA/make_remap_grid_file.py @@ -5,7 +5,6 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF -import pyroms def make_remap_grid_file(Bgrd, Bpos='t'): diff --git a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/CGrid_GLORYS.py b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/CGrid_GLORYS.py index a671a18..bad9c5e 100644 --- a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/CGrid_GLORYS.py +++ b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/CGrid_GLORYS.py @@ -53,12 +53,12 @@ def __init__(self, lon_t, lat_t, lon_u, lat_u, lon_v, lat_v, mask_t, mask_u, mas ones = np.ones(self.h.shape) a1 = lat_u[yrange[0]:yrange[1]+1, xrange[0]+1:xrange[1]+2] - \ - lat_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + lat_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] a2 = lon_u[yrange[0]:yrange[1]+1, xrange[0]+1:xrange[1]+2] - \ - lon_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] + lon_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] a3 = 0.5*(lat_u[yrange[0]:yrange[1]+1, xrange[0]+1:xrange[1]+2] + \ - lat_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1]) - a2 = np.where(a2 > 180*ones, a2 - 360*ones, a2) - a2 = np.where(a2 < -180*ones, a2 + 360*ones, a2) - a2 = a2 * np.cos(np.pi/180.*a3) - self.angle = np.arctan2(a1, a2) + lat_u[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1]) + a2 = np.where(a2 > 180*ones, a2 - 360*ones, a2) + a2 = np.where(a2 < -180*ones, a2 + 360*ones, a2) + a2 = a2 * np.cos(np.pi/180.*a3) + self.angle = np.arctan2(a1, a2) diff --git a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/__init__.py b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/__init__.py index b7511ff..35a4c6a 100644 --- a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/__init__.py @@ -2,7 +2,7 @@ GLORYS module """ -from CGrid_GLORYS import CGrid_GLORYS -from get_nc_CGrid_GLORYS import get_nc_CGrid_GLORYS -from make_remap_grid_file import make_remap_grid_file -from flood import flood +from .CGrid_GLORYS import CGrid_GLORYS +from .get_nc_CGrid_GLORYS import get_nc_CGrid_GLORYS +from .make_remap_grid_file import make_remap_grid_file +from .flood import flood diff --git a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/flood.py b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/flood.py index 8980dab..0725c2f 100644 --- a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/flood.py +++ b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/flood.py @@ -16,7 +16,7 @@ def flood(varz, Cgrd, Cpos='t', irange=None, jrange=None, \ - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -100,6 +100,6 @@ def flood(varz, Cgrd, Cpos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/get_nc_CGrid_GLORYS.py 
b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/get_nc_CGrid_GLORYS.py index fd5f033..43e8226 100644 --- a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/get_nc_CGrid_GLORYS.py +++ b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/get_nc_CGrid_GLORYS.py @@ -45,7 +45,7 @@ def get_nc_CGrid_GLORYS(grdfile, name='GLORYS_CORAL', area='regional', \ for i in range(mask_t[0,:].shape[1]): for j in range(mask_t[0,:].shape[0]): if mask_t[0,j,i] == 1: - h[j,i] = depth_bnds[bottom[j,i]] + h[j,i] = depth_bnds[int(bottom[j,i])] if area == 'global': #add rows in the north and the south, east and west diff --git a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/make_remap_grid_file.py b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/make_remap_grid_file.py index 666f6ed..5e66dbe 100644 --- a/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/make_remap_grid_file.py +++ b/pyroms_toolbox/pyroms_toolbox/CGrid_GLORYS/make_remap_grid_file.py @@ -5,7 +5,6 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF -import pyroms def make_remap_grid_file(Cgrd, Cpos='t'): diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/__init__.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/__init__.py index 7e04c5b..805da8e 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/__init__.py @@ -2,9 +2,9 @@ HYCOM module """ -from Grid_HYCOM import Grid_HYCOM -from get_nc_Grid_HYCOM import get_nc_Grid_HYCOM -from make_remap_grid_file import make_remap_grid_file -from flood import flood -from flood_fast import flood_fast -from flood_fast_weighted import flood_fast_weighted +from .Grid_HYCOM import Grid_HYCOM +from .get_nc_Grid_HYCOM import get_nc_Grid_HYCOM +from .make_remap_grid_file import make_remap_grid_file +from .flood import flood +from .flood_fast import flood_fast +from .flood_fast_weighted import flood_fast_weighted diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood.py index 73b885c..6ea4964 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood.py @@ -12,11 +12,11 @@ def flood(varz, grd, pos='t', irange=None, jrange=None, \ optional switch: - Bpos='t' specify the grid position where - the variable rely + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -95,6 +95,6 @@ def flood(varz, grd, pos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast.py index 6db3706..4cfc5c1 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast.py @@ -15,11 +15,11 @@ def flood_fast(varz, grd, pos='t', irange=None, jrange=None, \ optional switch: - Bpos='t' specify the grid position where - the variable rely + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for 
flooding if depth no flooding @@ -92,9 +92,9 @@ def flood_fast(varz, grd, pos='t', irange=None, jrange=None, \ varz[k][idx] = spval # RD: #varz[k,:] = _remapping_fast.flood(varz[k,:], dry, wet_mask, dxy) - print 'RD new flooding' + print('RD new flooding') varz[k,:] = creep.cslf(varz[k,:],spval,-200.,200.) - print varz[k,:].min() , varz[k,:].max() + print(varz[k,:].min() , varz[k,:].max()) # drop the deepest values down idx = np.where(np.isnan(varz) == 1) @@ -104,6 +104,6 @@ def flood_fast(varz, grd, pos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast_weighted.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast_weighted.py index 6e00e24..19ca8e1 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast_weighted.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/flood_fast_weighted.py @@ -12,11 +12,11 @@ def flood_fast_weighted(varz, grd, pos='t', irange=None, jrange=None, \ optional switch: - Bpos='t' specify the grid position where - the variable rely + the variable rely - irange specify grid sub-sample for i direction - jrange specify grid sub-sample for j direction - spval=1e35 define spval value - - dmax=0 if dmax>0, maximum horizontal + - dmax=0 if dmax>0, maximum horizontal flooding distance - cdepth=0 critical depth for flooding if depth no flooding @@ -97,6 +97,6 @@ def flood_fast_weighted(varz, grd, pos='t', irange=None, jrange=None, \ for i in range(Lm): for j in range(Mm): if mask[j,i] == 1: - varz[bottom[j,i]:,j,i] = varz[bottom[j,i],j,i] + varz[int(bottom[j,i]):,j,i] = varz[int(bottom[j,i]),j,i] return varz diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/get_nc_Grid_HYCOM.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/get_nc_Grid_HYCOM.py index 5d948f9..277ea5d 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/get_nc_Grid_HYCOM.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/get_nc_Grid_HYCOM.py @@ -45,7 +45,7 @@ def get_nc_Grid_HYCOM(grdfile, name='GLBa0.08_NEP'): for i in range(mask_t[0,:].shape[1]): for j in range(mask_t[0,:].shape[0]): if mask_t[0,j,i] == 1: - h[j,i] = depth_bnds[bottom[j,i]+1] + h[j,i] = depth_bnds[int(bottom[j,i])+1] geod = pyproj.Geod(ellps='WGS84') diff --git a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/make_remap_grid_file.py b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/make_remap_grid_file.py index f05d3e7..f75f433 100644 --- a/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/make_remap_grid_file.py +++ b/pyroms_toolbox/pyroms_toolbox/Grid_HYCOM/make_remap_grid_file.py @@ -5,7 +5,6 @@ import netCDF4 as netCDF except: import netCDF3 as netCDF -import pyroms def make_remap_grid_file(grd): diff --git a/pyroms_toolbox/pyroms_toolbox/PCA.py b/pyroms_toolbox/pyroms_toolbox/PCA.py index 5ce9bb7..81d137c 100644 --- a/pyroms_toolbox/pyroms_toolbox/PCA.py +++ b/pyroms_toolbox/pyroms_toolbox/PCA.py @@ -73,7 +73,7 @@ def GetPSD(self, blocks_length=0, fs=1, window='boxcar', overlap=0): window_vector = get_window(window, blocks_length) PSD = np.zeros((nECs, blocks_length/2+1)) freq = np.zeros((nECs, blocks_length/2+1)) - for n in xrange(nECs): + for n in range(nECs): P, f = psd(self.ECs[n], NFFT=blocks_length, Fs=fs, window=window_vector, noverlap=overlap) PSD[n] = np.squeeze(P) freq[n] = f @@ -87,7 +87,7 @@ def pack(self, dataset): npt = np.sum(~mask) packedata = np.zeros((nt,npt)) - for t in xrange(nt): + for t in range(nt): 
packedata[t] = np.ma.compressed(dataset[t]) return packedata, mask @@ -108,7 +108,7 @@ def unpack(self, packedeofs, mask): eofs = np.ma.masked_all((neof,dims)) - for n in xrange(neof): + for n in range(neof): eofs[n,~mask.flatten()] = packedeofs[:,n] if len(mask.shape) == 2: diff --git a/pyroms_toolbox/pyroms_toolbox/TS_diagram.py b/pyroms_toolbox/pyroms_toolbox/TS_diagram.py index 6de7c85..0515657 100644 --- a/pyroms_toolbox/pyroms_toolbox/TS_diagram.py +++ b/pyroms_toolbox/pyroms_toolbox/TS_diagram.py @@ -5,13 +5,13 @@ import pyroms_toolbox -def TS_diagram(temp, salt, depth=None, dens_lev=None, marker_size=2, fmt='%2.2f', pal=cm.spectral, \ +def TS_diagram(temp, salt, depth=None, dens_lev=None, marker_size=2, fmt='%2.2f', pal=cm.Spectral, \ tlim='None', slim='None', outfile=None): if dens_lev is None: dens_lev = np.arange(10,36,1) - if depth is None: + if depth is None: diag = plt.scatter(salt.flatten(), temp.flatten(), s=marker_size, edgecolors='none') else: diag = plt.scatter(salt.flatten(), temp.flatten(), c=depth.flatten(), \ @@ -36,12 +36,12 @@ def TS_diagram(temp, salt, depth=None, dens_lev=None, marker_size=2, fmt='%2.2f' ax.set_xlim(slim) ax.set_ylim(tlim) - + if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') diff --git a/pyroms_toolbox/pyroms_toolbox/__init__.py b/pyroms_toolbox/pyroms_toolbox/__init__.py index 0d0f565..9b9eb87 100644 --- a/pyroms_toolbox/pyroms_toolbox/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/__init__.py @@ -1,75 +1,76 @@ #!/usr/bin/env python ''' -PYROMS_TOOLBOX is a toolbox for working with ROMS +PYROMS_TOOLBOX is a toolbox for working with ROMS ocean models input/output files based on PYROMS -pyroms and pyroms_toolbox are based on the -python/numpy/matplotlib scientific python suite. +pyroms and pyroms_toolbox are based on the +python/numpy/matplotlib scientific python suite. NetCDF I/O is based on the NetCDF4-python package. 
''' -from iview import iview -from jview import jview -from lonview import lonview -from latview import latview -from sview import sview -from zview import zview -from isoview import isoview -from twoDview import twoDview -from transectview import transectview -from quiver import quiver -import seawater -from N2 import N2 -from O2_saturation import O2_saturation -import shapiro_filter -from change import change -from rx0 import rx0 -from rx1 import rx1 -from rvalue import rvalue -from get_coast_line import get_coast_line -from get_coast_line_from_mask import get_coast_line_from_mask -from get_ijcoast_line import get_ijcoast_line -from plot_coast_line import plot_coast_line -from plot_coast_line_from_mask import plot_coast_line_from_mask -from plot_ijcoast_line import plot_ijcoast_line -from lsq_phase_amplitude import lsq_phase_amplitude -from remapping import remapping -from remapping_bound import remapping_bound -from remapping_bound_sig import remapping_bound_sig -from remapping_tensor import remapping_tensor -from nc_create_roms_file import nc_create_roms_file -from nc_create_roms_bdry_file import nc_create_roms_bdry_file -from average import average -from plot_mask import plot_mask -import BGrid_GFDL -from smooth_1D import smooth_1D -import BGrid_SODA -from get_littoral import get_littoral -from get_littoral2 import get_littoral2 -from _move_runoff import move_runoff -from _move_river_t import move_river_t -from TS_diagram import TS_diagram -from date2jday import date2jday -from jday2date import jday2date -from iso2gregorian import iso2gregorian -from gregorian2iso import gregorian2iso -import BGrid_POP -from low_pass_filter import low_pass_filter -from PCA import PCA, center, standardize -from compute_eke import compute_eke -from compute_moc import compute_moc +from .iview import iview +from .jview import jview +from .lonview import lonview +from .latview import latview +from .sview import sview +from .zview import zview +from .isoview import isoview +from .twoDview import twoDview +from .transectview import transectview +from .quiver import quiver +from . import seawater +from .N2 import N2 +from .O2_saturation import O2_saturation +from . import shapiro_filter +from .change import change +from .rx0 import rx0 +from .rx1 import rx1 +from .rvalue import rvalue +from .get_coast_line import get_coast_line +from .get_coast_line_from_mask import get_coast_line_from_mask +from .get_ijcoast_line import get_ijcoast_line +from .plot_coast_line import plot_coast_line +from .plot_coast_line_from_mask import plot_coast_line_from_mask +from .plot_ijcoast_line import plot_ijcoast_line +from .lsq_phase_amplitude import lsq_phase_amplitude +from .remapping import remapping +from .remapping_bound import remapping_bound +from .remapping_bound_sig import remapping_bound_sig +from .remapping_tensor import remapping_tensor +from .nc_create_roms_file import nc_create_roms_file +from .nc_create_roms_bdry_file import nc_create_roms_bdry_file +from .average import average +from .plot_mask import plot_mask +from . import BGrid_GFDL +from .smooth_1D import smooth_1D +from . import BGrid_SODA +from .get_littoral import get_littoral +from .get_littoral2 import get_littoral2 +from ._move_runoff import move_runoff +from ._move_river_t import move_river_t +from .TS_diagram import TS_diagram +from .date2jday import date2jday +from .jday2date import jday2date +from .iso2gregorian import iso2gregorian +from .gregorian2iso import gregorian2iso +from . 
import BGrid_POP +from .low_pass_filter import low_pass_filter +from .PCA import PCA, center, standardize +from .compute_eke import compute_eke +from .compute_moc import compute_moc #from plot_Robinson_pyngl import plot_Robinson_pyngl -from get_cell_area import get_cell_area -from laplacian import laplacian -from vorticity import vorticity -from strain_norm import strain_norm -from strain_norm_old import strain_norm_old -from shift_SODA_data import shift_SODA_data -import Grid_HYCOM -from mld_from_temp import mld_from_temp -from mld_from_dens import mld_from_dens -from ocean_in import ocean_in +from .get_cell_area import get_cell_area +from .laplacian import laplacian +from .vorticity import vorticity +from .strain_norm import strain_norm +from .strain_norm_old import strain_norm_old +from .shift_SODA_data import shift_SODA_data +from . import Grid_HYCOM +from . import CGrid_GLORYS +from .mld_from_temp import mld_from_temp +from .mld_from_dens import mld_from_dens +from .ocean_in import ocean_in diff --git a/pyroms_toolbox/pyroms_toolbox/average.py b/pyroms_toolbox/pyroms_toolbox/average.py index 79759f1..5843345 100644 --- a/pyroms_toolbox/pyroms_toolbox/average.py +++ b/pyroms_toolbox/pyroms_toolbox/average.py @@ -52,7 +52,7 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ var = [var] nvar = len(var) else: - raise ValueError, 'var must be a str or a list of str' + raise ValueError('var must be a str or a list of str') avg.ncfiles = pyroms.io.MFDataset(ncfiles) @@ -67,9 +67,9 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ start = trange[0] end = min(trange[1]+1, Nt) else: - raise ValueError, 'trange must be within interval [0, %s].' %Nt + raise ValueError('trange must be within interval [0, %s].' %Nt) - print range(start,end) + print(list(range(start,end))) for varname in var: name = varname @@ -110,8 +110,8 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ setattr(avg, varname, incavg[:]) else: - raise ValueError, 'Variable must be 3D (time + 2 spacial dims) or \ -4D (time + 3 spacial dims)' + raise ValueError('Variable must be 3D (time + 2 spacial dims) or \ +4D (time + 3 spacial dims)') # if avgfile is defined, enter this conditional and begin creating a new netCDF file @@ -129,10 +129,10 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ nc.Created = datetime.now().strftime("%Y-%m-%d %H:%M:%S") nc.Files = ", ".join(avg.ncfiles._files) - print 'Writing '+str(avgfile)+'...' + print('Writing '+str(avgfile)+'...') # for each dimension in original netCDF files, recreate the dimensions in the new netCDF file - for newdim in avg.ncfiles.dimensions.keys(): + for newdim in list(avg.ncfiles.dimensions.keys()): if avg.ncfiles.dimensions[newdim].isunlimited(): nc.createDimension(newdim,None) else: @@ -152,7 +152,7 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ # for each variable in var, create a new variable with all dimensions associated with that # variable except ocean_time for varname in var: - print ' writting %s...' %varname + print(' writting %s...' %varname) vardims = avg.ncfiles.variables[varname].dimensions @@ -172,9 +172,9 @@ def average(var, ncfiles, trange=None, avgfile=None, spval=1e37, timevar='ocean_ avg.ncfiles.close() nc.close() else: - print "avgfile must be a string that equates to the path where this netCDF file is to be placed." 
+ print("avgfile must be a string that equates to the path where this netCDF file is to be placed.") return else: avg.ncfiles.close() - print "Returning average object..." + print("Returning average object...") return avg diff --git a/pyroms_toolbox/pyroms_toolbox/change.py b/pyroms_toolbox/pyroms_toolbox/change.py index ccfc9b3..5ac3bec 100644 --- a/pyroms_toolbox/pyroms_toolbox/change.py +++ b/pyroms_toolbox/pyroms_toolbox/change.py @@ -7,7 +7,7 @@ def change(old,relation,flag,value): if relation != '==' and relation != '!=' and relation != '>' and relation != '<' and \ relation != '!=' and relation != '>=' and relation != '<=': - raise ValueError, 'Relation {%s} not valid' % relation + raise ValueError('Relation {%s} not valid' % relation) if np.isnan(flag): if relation == '==': @@ -15,7 +15,7 @@ def change(old,relation,flag,value): elif relation == '!=': replace = np.where(np.isnan(old) == False) else: - raise ValueError, 'Relation should be == or ~= to compare to NaN' + raise ValueError('Relation should be == or ~= to compare to NaN') else: if relation == '==': diff --git a/pyroms_toolbox/pyroms_toolbox/compute_eke.py b/pyroms_toolbox/pyroms_toolbox/compute_eke.py index c9abaa8..5bcb57d 100644 --- a/pyroms_toolbox/pyroms_toolbox/compute_eke.py +++ b/pyroms_toolbox/pyroms_toolbox/compute_eke.py @@ -1,5 +1,4 @@ import numpy as np -import pyroms def compute_eke(u, v): diff --git a/pyroms_toolbox/pyroms_toolbox/get_coast_line.py b/pyroms_toolbox/pyroms_toolbox/get_coast_line.py index 56bb61a..189f79e 100644 --- a/pyroms_toolbox/pyroms_toolbox/get_coast_line.py +++ b/pyroms_toolbox/pyroms_toolbox/get_coast_line.py @@ -24,8 +24,8 @@ def get_coast_line(grd, Cpos='rho'): lat = grd.hgrid.lat_rho mask = grd.hgrid.mask_psi else: - raise Warning, '%s bad position. Valid Arakawa-C are \ - rho, u or v.' % Cpos + raise Warning('%s bad position. Valid Arakawa-C are \ + rho, u or v.' 
% Cpos) jidx, iidx = np.where(mask == 0) @@ -36,28 +36,28 @@ def get_coast_line(grd, Cpos='rho'): if mask[jidx[i], iidx[i]] != mask[jidx[i]+1, iidx[i]]: lonc = ([lon[jidx[i]+1,iidx[i]], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i]+1,iidx[i]], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if jidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i]-1, iidx[i]]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i],iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i],iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != mask.shape[1]-1: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]+1]: lonc = ([lon[jidx[i],iidx[i]+1], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]+1], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]-1]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i]+1,iidx[i]]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i]+1,iidx[i]]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) return np.array(coast) diff --git a/pyroms_toolbox/pyroms_toolbox/get_coast_line_from_mask.py b/pyroms_toolbox/pyroms_toolbox/get_coast_line_from_mask.py index 8735888..4a9c207 100644 --- a/pyroms_toolbox/pyroms_toolbox/get_coast_line_from_mask.py +++ b/pyroms_toolbox/pyroms_toolbox/get_coast_line_from_mask.py @@ -19,28 +19,28 @@ def get_coast_line_from_mask(msk, lon, lat): if mask[jidx[i], iidx[i]] != mask[jidx[i]+1, iidx[i]]: lonc = ([lon[jidx[i]+1,iidx[i]], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i]+1,iidx[i]], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if jidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i]-1, iidx[i]]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i],iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i],iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != mask.shape[1]-1: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]+1]: lonc = ([lon[jidx[i],iidx[i]+1], lon[jidx[i]+1,iidx[i]+1]]) latc = ([lat[jidx[i],iidx[i]+1], lat[jidx[i]+1,iidx[i]+1]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) if iidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]-1]: lonc = ([lon[jidx[i],iidx[i]], lon[jidx[i]+1,iidx[i]]]) latc = ([lat[jidx[i],iidx[i]], lat[jidx[i]+1,iidx[i]]]) - seg = zip(lonc,latc) + seg = list(zip(lonc,latc)) coast.append(seg) return np.array(coast) diff --git a/pyroms_toolbox/pyroms_toolbox/get_ijcoast_line.py b/pyroms_toolbox/pyroms_toolbox/get_ijcoast_line.py index 24177c1..7041dc6 100644 --- a/pyroms_toolbox/pyroms_toolbox/get_ijcoast_line.py +++ b/pyroms_toolbox/pyroms_toolbox/get_ijcoast_line.py @@ -17,28 +17,28 @@ def get_ijcoast_line(mask): if mask[jidx[i], iidx[i]] != mask[jidx[i]+1, iidx[i]]: ic = ([iidx[i]-0.5, iidx[i]+1-0.5]) jc = ([jidx[i]+1-0.5, jidx[i]+1-0.5]) - seg = zip(ic,jc) + seg = list(zip(ic,jc)) ijcoast.append(seg) if jidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i]-1, iidx[i]]: ic = ([iidx[i]-0.5, iidx[i]+1-0.5]) jc = ([jidx[i]-0.5, jidx[i]-0.5]) - seg = zip(ic,jc) + seg = list(zip(ic,jc)) ijcoast.append(seg) if iidx[i] != mask.shape[1]-1: if mask[jidx[i], iidx[i]] != mask[jidx[i], iidx[i]+1]: ic = ([iidx[i]+1-0.5, iidx[i]+1-0.5]) jc = ([jidx[i]-0.5, jidx[i]+1-0.5]) - seg = zip(ic,jc) + seg = list(zip(ic,jc)) ijcoast.append(seg) if iidx[i] != 0: if mask[jidx[i], iidx[i]] != mask[jidx[i], 
iidx[i]-1]: ic = ([iidx[i]-0.5, iidx[i]-0.5]) jc = ([jidx[i]-0.5, jidx[i]+1-0.5]) - seg = zip(ic,jc) + seg = list(zip(ic,jc)) ijcoast.append(seg) return np.array(ijcoast) diff --git a/pyroms_toolbox/pyroms_toolbox/isoview.py b/pyroms_toolbox/pyroms_toolbox/isoview.py index c3592bb..3e1045e 100644 --- a/pyroms_toolbox/pyroms_toolbox/isoview.py +++ b/pyroms_toolbox/pyroms_toolbox/isoview.py @@ -15,29 +15,29 @@ def isoview(var, prop, tindex, isoval, grid, filename=None, \ map = isoview(var, prop, tindex, isoval, grid, {optional switch}) optional switch: - - filename if defined, load the variable from file - - cmin set color minimum limit - - cmax set color maximum limit - - clev set the number of color step - - fill use contourf instead of pcolor - - contour overlay contour (request fill=True) - - d contour density (default d=4) - - range set axis limit - - fts set font size (default: 12) - - title add title to the plot - - clb add colorbar (defaul: True) - - pal set color map (default: cm.jet) - - proj set projection type (default: merc) - - fill_land fill land masked area with gray (defaul: True) - - outfile if defined, write figure to file - - plot a projection of variable at property == isoval. If filename - is provided, var and prop must be a strings and the variables will + - filename if defined, load the variable from file + - cmin set color minimum limit + - cmax set color maximum limit + - clev set the number of color step + - fill use contourf instead of pcolor + - contour overlay contour (request fill=True) + - d contour density (default d=4) + - range set axis limit + - fts set font size (default: 12) + - title add title to the plot + - clb add colorbar (defaul: True) + - pal set color map (default: cm.jet) + - proj set projection type (default: merc) + - fill_land fill land masked area with gray (defaul: True) + - outfile if defined, write figure to file + + plot a projection of variable at property == isoval. If filename + is provided, var and prop must be a strings and the variables will be load from the file. grid can be a grid object or a gridid. In the later case, the grid object correponding to the provided gridid will be loaded. - If proj is not None, return a Basemap object to be used with quiver - for example. + If proj is not None, return a Basemap object to be used with quiver + for example. """ # get grid @@ -81,10 +81,10 @@ def isoview(var, prop, tindex, isoval, grid, filename=None, \ # get constante-iso slice if tindex == -1: var = var[:,:,:] - prop = prop[:,:,:] + prop = prop[:,:,:] else: var = var[tindex,:,:,:] - prop = prop[tindex,:,:,:] + prop = prop[tindex,:,:,:] if fill == True: isoslice, lon, lat = pyroms.tools.isoslice(var, prop, isoval, \ @@ -132,7 +132,7 @@ def isoview(var, prop, tindex, isoval, grid, filename=None, \ lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. else: lon_min = range[0] @@ -150,49 +150,49 @@ def isoview(var, prop, tindex, isoval, grid, filename=None, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='h', area_thresh=5.) 
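The *view plotting functions build a Basemap instance (locally named map, shadowing the builtin) and then call it to convert longitude/latitude arrays to projection coordinates. A minimal sketch of that call pattern, with arbitrary bounds standing in for the grid-derived ones used above:

    import numpy as np
    from mpl_toolkits.basemap import Basemap

    # Arbitrary bounds for illustration; the toolbox derives them from the grid lon/lat extrema.
    m = Basemap(projection='merc', llcrnrlon=-160., llcrnrlat=45.,
                urcrnrlon=-140., urcrnrlat=62., lat_0=53.5, lon_0=-150.,
                resolution='c')

    lon = np.array([-155., -150., -145.])
    lat = np.array([50., 55., 60.])

    # A Basemap instance is callable: lon/lat in, projection coordinates out,
    # which is what x, y = map(lon, lat) does in the hunks above.
    x, y = m(lon, lat)
    print(x, y)

The list(...) wrapper that the patch adds around map(lon, lat) looks like a mechanical 2to3 conversion that treated map as the builtin; it only turns the returned pair into a list before unpacking, so the projection call itself behaves the same.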
#map = pyroms.utility.get_grid_proj(grd, type=proj) - x, y = map(lon,lat) - + x, y = list(map(lon,lat)) + if fill_land is True and proj is not None: # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') else: - if proj is not None: + if proj is not None: Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) pyroms_toolbox.plot_coast_line(grd, map) - else: + else: plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) pyroms_toolbox.plot_coast_line(grd) - + if fill is True: - if proj is not None: + if proj is not None: cf = Basemap.contourf(map, x, y, isoslice, vc, cmap = pal, \ norm = pal_norm) - else: + else: cf = plt.contourf(lon, lat, isoslice, vc, cmap = pal, \ norm = pal_norm) else: - if proj is not None: + if proj is not None: cf = Basemap.pcolor(map, x, y, isoslice, cmap = pal, norm = pal_norm) - else: + else: cf = plt.pcolor(lon, lat, isoslice, cmap = pal, norm = pal_norm) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True to overlay contour.' + raise Warning('Please run again with fill=True to overlay contour.') else: if proj is not None: Basemap.contour(map, x, y, isoslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') - else: + else: plt.contour(lon, lat, isoslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') if proj is None and range is not None: - plt.axis(range) + plt.axis(range) if title is not None: @@ -207,11 +207,11 @@ def isoview(var, prop, tindex, isoval, grid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. 
Please use .png, .svg or .eps file extension.') if proj is None: diff --git a/pyroms_toolbox/pyroms_toolbox/iview.py b/pyroms_toolbox/pyroms_toolbox/iview.py index 17813ff..6df0eac 100644 --- a/pyroms_toolbox/pyroms_toolbox/iview.py +++ b/pyroms_toolbox/pyroms_toolbox/iview.py @@ -20,7 +20,7 @@ def iview(var, tindex, iindex, gridid, filename=None, \ - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - - d contour density (default d=4) + - d contour density (default d=4) - jrange j range - hrange h range - fts set font size (default: 12) @@ -131,21 +131,21 @@ def iview(var, tindex, iindex, gridid, filename=None, \ #pal.set_bad('w', 1.0) pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) - + # clear figure #plt.clf() if map is True: - # set axes for the main plot in order to keep space for the map - if fts < 12: - ax=None - else: - ax = plt.axes([0.15, 0.08, 0.8, 0.65]) - else: - if fts < 12: - ax=None - else: - ax=plt.axes([0.15, 0.1, 0.8, 0.8]) + # set axes for the main plot in order to keep space for the map + if fts < 12: + ax=None + else: + ax = plt.axes([0.15, 0.08, 0.8, 0.65]) + else: + if fts < 12: + ax=None + else: + ax=plt.axes([0.15, 0.1, 0.8, 0.8]) if fill is True: @@ -154,13 +154,13 @@ def iview(var, tindex, iindex, gridid, filename=None, \ cf = plt.pcolor(lati, zi, islice, cmap = pal, norm = pal_norm, axes=ax) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format=clbformat) - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format=clbformat) + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True for overlay contour.' + raise Warning('Please run again with fill=True for overlay contour.') else: plt.contour(lati, zi, islice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) @@ -188,8 +188,8 @@ def iview(var, tindex, iindex, gridid, filename=None, \ if map is True: # draw a map with constant-i slice location ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) - varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) - xmin, xmax = ax.get_xlim() + varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) + xmin, xmax = ax.get_xlim() dd = (lat[:,iindex] - xmin) * (lat[:,iindex] - xmin) start = np.where(dd == dd.min()) dd = (lat[:,iindex] - xmax) * (lat[:,iindex] - xmax) @@ -198,12 +198,12 @@ def iview(var, tindex, iindex, gridid, filename=None, \ lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='i', area_thresh=10.) - x, y = map(lon,lat) + x, y = list(map(lon,lat)) # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') @@ -215,10 +215,10 @@ def iview(var, tindex, iindex, gridid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. 
Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/jday2date.py b/pyroms_toolbox/pyroms_toolbox/jday2date.py index 8c5c95b..1b16630 100644 --- a/pyroms_toolbox/pyroms_toolbox/jday2date.py +++ b/pyroms_toolbox/pyroms_toolbox/jday2date.py @@ -22,18 +22,18 @@ def jday2date(jday): for t in range(nt): j = int(np.floor(jday[t])) + 32044 + jd0 - g = j / 146097 + g = j // 146097 dg = j % 146097 - c = (dg / 36524 + 1) * 3 / 4 + c = (dg // 36524 + 1) * 3 // 4 dc = dg - c * 36524 - b = dc / 1461 + b = dc // 1461 db = dc % 1461 - a = (db / 365 + 1) * 3 / 4 + a = (db // 365 + 1) * 3 // 4 da = db - a * 365 y = g * 400 + c * 100 + b * 4 + a - m = (da * 5 + 308) / 153 - 2 - d = da - (m + 4) * 153 / 5 + 122 - Y = y - 4800 + (m + 2) / 12 + m = (da * 5 + 308) // 153 - 2 + d = da - (m + 4) * 153 // 5 + 122 + Y = y - 4800 + (m + 2) // 12 M = (m + 2) % 12 + 1 D = d + 1 diff --git a/pyroms_toolbox/pyroms_toolbox/jview.py b/pyroms_toolbox/pyroms_toolbox/jview.py index 66fc00c..e5406e6 100644 --- a/pyroms_toolbox/pyroms_toolbox/jview.py +++ b/pyroms_toolbox/pyroms_toolbox/jview.py @@ -20,7 +20,7 @@ def jview(var, tindex, jindex, gridid, filename=None, \ - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - - d contour density (default d=4) + - d contour density (default d=4) - irange i range - hrange h range - fts set font size (default: 12) @@ -130,21 +130,21 @@ def jview(var, tindex, jindex, gridid, filename=None, \ #pal.set_bad('w', 1.0) pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) - + # clear figure #plt.clf() if map is True: - # set axes for the main plot in order to keep space for the map - if fts < 12: - ax=None - else: - ax = plt.axes([0.15, 0.08, 0.8, 0.65]) - else: - if fts < 12: - ax=None - else: - ax=plt.axes([0.15, 0.1, 0.8, 0.8]) + # set axes for the main plot in order to keep space for the map + if fts < 12: + ax=None + else: + ax = plt.axes([0.15, 0.08, 0.8, 0.65]) + else: + if fts < 12: + ax=None + else: + ax=plt.axes([0.15, 0.1, 0.8, 0.8]) if fill is True: @@ -153,13 +153,13 @@ def jview(var, tindex, jindex, gridid, filename=None, \ cf = plt.pcolor(lonj, zj, jslice, cmap = pal, norm = pal_norm, axes=ax) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format=clbformat) - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format=clbformat) + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True for overlay contour.' + raise Warning('Please run again with fill=True for overlay contour.') else: plt.contour(lonj, zj, jslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) @@ -187,8 +187,8 @@ def jview(var, tindex, jindex, gridid, filename=None, \ if map is True: # draw a map with constant-i slice location ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) - varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) - xmin, xmax = ax.get_xlim() + varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) + xmin, xmax = ax.get_xlim() dd = (lon[jindex,:] - xmin) * (lon[jindex,:] - xmin) start = np.where(dd == dd.min()) dd = (lon[jindex,:] - xmax) * (lon[jindex,:] - xmax) @@ -197,12 +197,12 @@ def jview(var, tindex, jindex, gridid, filename=None, \ lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. 
map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='i', area_thresh=10.) - x, y = map(lon,lat) + x, y = list(map(lon,lat)) # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') @@ -214,10 +214,10 @@ def jview(var, tindex, jindex, gridid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/latview.py b/pyroms_toolbox/pyroms_toolbox/latview.py index 05e4bff..2937ce0 100644 --- a/pyroms_toolbox/pyroms_toolbox/latview.py +++ b/pyroms_toolbox/pyroms_toolbox/latview.py @@ -20,7 +20,7 @@ def latview(var, tindex, latitude, gridid, filename=None, \ - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - - d contour density (default d=4) + - d contour density (default d=4) - lonrange longitude range - hrange h range - fts set font size (default: 12) @@ -30,8 +30,8 @@ def latview(var, tindex, latitude, gridid, filename=None, \ - clb add colorbar (defaul: True) - outfile if defined, write figure to file - plot a constante-latitudinal slice of variable var. If filename - is provided, var must be a string and the variable will be load + plot a constante-latitudinal slice of variable var. If filename + is provided, var must be a string and the variable will be load from the file. grid can be a grid object or a gridid. In the later case, the grid object correponding to the provided gridid will be loaded. @@ -124,21 +124,21 @@ def latview(var, tindex, latitude, gridid, filename=None, \ #pal.set_bad('w', 1.0) pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) - + # clear figure #plt.clf() if map is True: - # set axes for the main plot in order to keep space for the map - if fts < 12: - ax=None - else: - ax = plt.axes([0.15, 0.08, 0.8, 0.65]) - else: - if fts < 12: - ax=None - else: - ax=plt.axes([0.15, 0.1, 0.8, 0.8]) + # set axes for the main plot in order to keep space for the map + if fts < 12: + ax=None + else: + ax = plt.axes([0.15, 0.08, 0.8, 0.65]) + else: + if fts < 12: + ax=None + else: + ax=plt.axes([0.15, 0.1, 0.8, 0.8]) if fill is True: @@ -147,13 +147,13 @@ def latview(var, tindex, latitude, gridid, filename=None, \ cf = plt.pcolor(lons, zs, latslice, cmap = pal, norm = pal_norm, axes=ax) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True for overlay contour.' 
+ raise Warning('Please run again with fill=True for overlay contour.') else: plt.contour(lons, zs, latslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) @@ -181,25 +181,25 @@ def latview(var, tindex, latitude, gridid, filename=None, \ if map is True: # draw a map with constant-i slice location ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) - varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) + varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) lon_min = lon.min() lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='i', area_thresh=10.) - x, y = map(lon,lat) + x, y = list(map(lon,lat)) if lonrange is None: - xs, ys = map(lons[0,:],lats[0,:]) + xs, ys = list(map(lons[0,:],lats[0,:])) else: c1 = lats[0,:] >= lonrange[0] c2 = lats[0,:] <= lonrange[1] c = c1 & c2 idx = np.where(c == True) - xs, ys = map(lons[0,idx[0]],lats[0,idx[0]]) + xs, ys = list(map(lons[0,idx[0]],lats[0,idx[0]])) # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') @@ -210,10 +210,10 @@ def latview(var, tindex, latitude, gridid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/lonview.py b/pyroms_toolbox/pyroms_toolbox/lonview.py index 989e465..0861f26 100644 --- a/pyroms_toolbox/pyroms_toolbox/lonview.py +++ b/pyroms_toolbox/pyroms_toolbox/lonview.py @@ -20,7 +20,7 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - - d contour density (default d=4) + - d contour density (default d=4) - latrange latitude range - hrange h range - fts set font size (default: 12) @@ -30,8 +30,8 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ - clb add colorbar (defaul: True) - outfile if defined, write figure to file - plot a constant-longitudinal slice of variable var. If filename - is provided, var must be a string and the variable will be load + plot a constant-longitudinal slice of variable var. If filename + is provided, var must be a string and the variable will be load from the file. grid can be a grid object or a gridid. In the later case, the grid object correponding to the provided gridid will be loaded. 
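The raise conversions scattered through these hunks all follow one rule: Python 3 dropped the old statement form (raise Warning, 'message') and only accepts a constructed exception object. A minimal sketch, with a hypothetical helper name used purely for illustration:

    def _require_fill(fill, contour):
        # Illustration only, not part of the patch.
        # Python 2 also accepted:  raise Warning, 'message'
        if contour and not fill:
            raise Warning('Please run again with fill=True for overlay contour.')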
@@ -124,21 +124,21 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ #pal.set_bad('w', 1.0) pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) - + # clear figure #plt.clf() if map is True: - # set axes for the main plot in order to keep space for the map - if fts < 12: - ax=None - else: - ax = plt.axes([0.15, 0.08, 0.8, 0.65]) - else: - if fts < 12: - ax=None - else: - ax=plt.axes([0.15, 0.1, 0.8, 0.8]) + # set axes for the main plot in order to keep space for the map + if fts < 12: + ax=None + else: + ax = plt.axes([0.15, 0.08, 0.8, 0.65]) + else: + if fts < 12: + ax=None + else: + ax=plt.axes([0.15, 0.1, 0.8, 0.8]) if fill is True: @@ -147,13 +147,13 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ cf = plt.pcolor(lats, zs, lonslice, cmap = pal, norm = pal_norm, axes=ax) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True for overlay contour.' + raise Warning('Please run again with fill=True for overlay contour.') else: plt.contour(lats, zs, lonslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid', axes=ax) @@ -181,25 +181,25 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ if map is True: # draw a map with constant-i slice location ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) - varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) + varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) lon_min = lon.min() lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='i', area_thresh=10.) - x, y = map(lon,lat) + x, y = list(map(lon,lat)) if latrange is None: - xs, ys = map(lons[0,:],lats[0,:]) + xs, ys = list(map(lons[0,:],lats[0,:])) else: c1 = lats[0,:] >= latrange[0] c2 = lats[0,:] <= latrange[1] c = c1 & c2 idx = np.where(c == True) - xs, ys = map(lons[0,idx[0]],lats[0,idx[0]]) + xs, ys = list(map(lons[0,idx[0]],lats[0,idx[0]])) # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') @@ -210,10 +210,10 @@ def lonview(var, tindex, longitude, gridid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/low_pass_filter.py b/pyroms_toolbox/pyroms_toolbox/low_pass_filter.py index 45b5702..5a60a76 100644 --- a/pyroms_toolbox/pyroms_toolbox/low_pass_filter.py +++ b/pyroms_toolbox/pyroms_toolbox/low_pass_filter.py @@ -20,7 +20,7 @@ def low_pass_filter(data, window_size=3): low_passed_data = np.zeros(data.shape) if np.abs(window_size) % 2 != 1: - raise ValueError, 'window_size must be odd.' 
+ raise ValueError('window_size must be odd.') win2 = (window_size - 1) / 2 diff --git a/pyroms_toolbox/pyroms_toolbox/nc_create_roms_file.py b/pyroms_toolbox/pyroms_toolbox/nc_create_roms_file.py index 879877d..7227245 100644 --- a/pyroms_toolbox/pyroms_toolbox/nc_create_roms_file.py +++ b/pyroms_toolbox/pyroms_toolbox/nc_create_roms_file.py @@ -48,29 +48,29 @@ def nc_create_roms_file(filename, grd, ocean_time, lgrid=True): nc.createVariable('s_rho', 'f8', ('s_rho')) nc.variables['s_rho'].long_name = 'S-coordinate at RHO-points' - nc.variables['s_rho'].valid_min = '-1' - nc.variables['s_rho'].valid_max = '0' + nc.variables['s_rho'].valid_min = '-1.0' + nc.variables['s_rho'].valid_max = '0.0' nc.variables['s_rho'].field = 's_rho,scalar' nc.variables['s_rho'][:] = grd.vgrid.s_rho nc.createVariable('s_w', 'f8', ('s_w')) nc.variables['s_w'].long_name = 'S-coordinate at W-points' - nc.variables['s_w'].valid_min = '-1' - nc.variables['s_w'].valid_max = '0' + nc.variables['s_w'].valid_min = '-1.0' + nc.variables['s_w'].valid_max = '0.0' nc.variables['s_w'].field = 's_w,scalar' nc.variables['s_w'][:] = grd.vgrid.s_w nc.createVariable('Cs_r', 'f8', ('s_rho')) nc.variables['Cs_r'].long_name = 'S-coordinate stretching curves at RHO-points' - nc.variables['Cs_r'].valid_min = '-1' - nc.variables['Cs_r'].valid_max = '0' + nc.variables['Cs_r'].valid_min = '-1.0' + nc.variables['Cs_r'].valid_max = '0.0' nc.variables['Cs_r'].field = 'Cs_r,scalar' nc.variables['Cs_r'][:] = grd.vgrid.Cs_r nc.createVariable('Cs_w', 'f8', ('s_w')) nc.variables['Cs_w'].long_name = 'S-coordinate stretching curves at W-points' - nc.variables['Cs_w'].valid_min = '-1' - nc.variables['Cs_w'].valid_max = '0' + nc.variables['Cs_w'].valid_min = '-1.0' + nc.variables['Cs_w'].valid_max = '0.0' nc.variables['Cs_w'].field = 'Cs_w,scalar' nc.variables['Cs_w'][:] = grd.vgrid.Cs_w diff --git a/pyroms_toolbox/pyroms_toolbox/ocean_in.py b/pyroms_toolbox/pyroms_toolbox/ocean_in.py index d638a7b..7871528 100644 --- a/pyroms_toolbox/pyroms_toolbox/ocean_in.py +++ b/pyroms_toolbox/pyroms_toolbox/ocean_in.py @@ -8,7 +8,7 @@ """ Some tools for parsing the ROMS ocean.in file, saving it to JSON and/or -writing it back out to ocean.in format. The Python format is a list and +writing it back out to ocean.in format. The Python format is a list and a dictionary. The dictionary has the settable variables such as 'Ngrids' as keys, the values as values in string form. The list is more complex, containing one element for comment lines and blank lines and up to four @@ -27,18 +27,18 @@ class Line: """ def __init__(self, text, ngrids=False, pos=0, comment="", extra=[]): """(Line, str, bool, int, str) -> NoneType - Constructor for line with ROMS variable name. + Constructor for line with ROMS variable name. - text - String with ROMS variable name. - ngrids - One value per grid (True) or one for all grids (False) - pos - Position of first = in file. + text - String with ROMS variable name. + ngrids - One value per grid (True) or one for all grids (False) + pos - Position of first = in file. 
comment - String containing a final comment (if any) - """ - self.text = text - self.ngrids = ngrids - self.pos = pos - self.comment = comment - self.extra = extra + """ + self.text = text + self.ngrids = ngrids + self.pos = pos + self.comment = comment + self.extra = extra class ocean_in: @@ -51,35 +51,35 @@ class ocean_in: def do_lines(self, line_list): # Private helper function for dealing with continuation lines first = line_list.pop(0) - extra = [] + extra = [] if len(first) > 1: extra.append(first[1]) else: extra.append("") p = re.compile('([^=]+)(=+)([^=!]+)') try: - eq_pos = first[0].find('=') + eq_pos = first[0].find('=') m = p.match(first[0]) key = m.group(1) eq = m.group(2) value = m.group(3) - key = key.strip() - value = value.lstrip() + key = key.strip() + value = value.lstrip() except: - print("trouble!", first) + print(("trouble!", first)) exit() key = key.strip() value_list = [value] for item in line_list: - line = item[0] + line = item[0] line = line.strip() value_list.append(line) if len(item) > 1: extra.append(item[1]) else: extra.append("") - ngrids = False - if eq == "==": ngrids = True + ngrids = False + if eq == "==": ngrids = True my_list = Line(key, ngrids=ngrids, pos=eq_pos, extra = extra) self.var_list.append(my_list) self.var_dict[key] = value_list @@ -98,12 +98,12 @@ def __init__(self, fname): fh = open(fname) for line in fh: - # Handle blank lines and lines starting with ! - line = line.rstrip() + # Handle blank lines and lines starting with ! + line = line.rstrip() if len(line) == 0 or line[0] == '!': line_list = Line(line) self.var_list.append(line_list) - continue + continue # Look for comments after the assignment and stash them away p = re.compile('(.*\S)( *!.*)') @@ -135,7 +135,7 @@ def __init__(self, fname): eq = m.group(2) value = m.group(3) except: - print("trouble with line:", line) + print(("trouble with line:", line)) exit() key = key.strip() value = [value.strip()] @@ -155,69 +155,69 @@ def write_ocean_in(self, fname): try: fh = open(fname, 'w') except: - print('trouble opening file', fname) + print(('trouble opening file', fname)) exit() for item in self.var_list: # Deal with non-var lines first - if item.pos == 0: + if item.pos == 0: fh.write(item.text) fh.write("\n") - continue - + continue + varname = item.text - eq = "=" - if item.ngrids: eq = "==" - value = self.var_dict[varname] + eq = "=" + if item.ngrids: eq = "==" + value = self.var_dict[varname] string = ' '.join(value) string = item.text + ' ' + eq + ' ' + string - if item.comment: + if item.comment: string += item.comment - index = string.find('=') - # We might need pos below. - pos = item.pos + index = string.find('=') + # We might need pos below. 
+ pos = item.pos if pos > index: - string = ' '*(item.pos-index) + string + string = ' '*(item.pos-index) + string - # Look for continuation lines and split them up - first = True + # Look for continuation lines and split them up + first = True p = re.compile('(\\\\|\|)') m = p.findall(string) - if m: - # All continuation chunks should have an "extra" - # except after a merge - extra = item.extra - count = 0 + if m: + # All continuation chunks should have an "extra" + # except after a merge + extra = item.extra + count = 0 for match in m: index = string.find(match) part = string[0:index+len(match)] - if len(extra) > count and extra[count]: + if len(extra) > count and extra[count]: part += extra[count] - count += 1 + count += 1 string = string[index+len(match):] - # Add space to front of trailing lines - if first: - first = False - else: + # Add space to front of trailing lines + if first: + first = False + else: p = re.compile('([^ ])') - m = p.search(part) - index = part.find(m.group(1)) - part = ' '*(pos-index + 3) + part + m = p.search(part) + index = part.find(m.group(1)) + part = ' '*(pos-index + 3) + part fh.write(part) fh.write("\n") - # Get the last continued line and pad it too + # Get the last continued line and pad it too p = re.compile('([^ ])') - m = p.search(string) - index = string.find(m.group(1)) - string = ' '*(pos-index + 3) + string - if len(extra) > count and extra[count]: + m = p.search(string) + index = string.find(m.group(1)) + string = ' '*(pos-index + 3) + string + if len(extra) > count and extra[count]: string += extra[count] fh.write(string) fh.write("\n") - else: + else: fh.write(string) fh.write("\n") @@ -243,28 +243,28 @@ def merge_dicts(self, my_ocn_list): my_ocn_list - a list of dictionaries to add to self. Given a ROMS ocean_in object, merge the dictionaries from one or - more other grids to create a multi-grid ocean_in. + more other grids to create a multi-grid ocean_in. """ # Copy here in case we're adding this object to itself t_dict = copy.deepcopy(self.var_dict) num_lists = len(my_ocn_list) for i in range(num_lists): - new_dict = my_ocn_list[i].var_dict + new_dict = my_ocn_list[i].var_dict for it in self.var_list: - var = it.text - if it.ngrids: - try: + var = it.text + if it.ngrids: + try: # If the first string ends in "\" or "|", it is likely to be # a set of long filenames for which we need to add "\" before appending - first = t_dict[var][0] - if first[-1] == '|' or first[-1] == '\\': - t_dict[var][-1] += ' \\' - t_dict[var] = t_dict[var] + new_dict[var] + first = t_dict[var][0] + if first[-1] == '|' or first[-1] == '\\': + t_dict[var][-1] += ' \\' + t_dict[var] = t_dict[var] + new_dict[var] except: - print("List", i, "is missing variable", var) + print(("List", i, "is missing variable", var)) t_dict['Ngrids'][0] = str(int(t_dict['Ngrids'][0]) + \ - int(new_dict['Ngrids'][0])) + int(new_dict['Ngrids'][0])) self.var_dict = t_dict def main(): diff --git a/pyroms_toolbox/pyroms_toolbox/plot_Robinson_pyngl.py b/pyroms_toolbox/pyroms_toolbox/plot_Robinson_pyngl.py index 9a78178..fbbb215 100644 --- a/pyroms_toolbox/pyroms_toolbox/plot_Robinson_pyngl.py +++ b/pyroms_toolbox/pyroms_toolbox/plot_Robinson_pyngl.py @@ -28,7 +28,7 @@ def plot_Robinson_pyngl(var,lon,lat,wks_name='plot', clim=None, cspace=None, cma try: Ngl.define_colormap(wks,mycmap) except: - raise Warning, 'Unknown colormap' + raise Warning('Unknown colormap') # # The next set of resources will apply to the contour plot and the labelbar. 
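For readers following the ocean_in parser above: the pattern ([^=]+)(=+)([^=!]+) splits a ROMS keyword line into name, = or ==, and value, with == meaning one value per grid. A small sketch on a made-up input line:

    import re

    p = re.compile('([^=]+)(=+)([^=!]+)')
    m = p.match('     NtileI == 2   ')          # example keyword line, illustration only
    key = m.group(1).strip()                    # 'NtileI'
    eq = m.group(2)                             # '==' marks one value per grid
    value = m.group(3).strip()                  # '2'
    ngrids = (eq == '==')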
diff --git a/pyroms_toolbox/pyroms_toolbox/plot_mask.py b/pyroms_toolbox/pyroms_toolbox/plot_mask.py index d09e3ab..4fa4568 100644 --- a/pyroms_toolbox/pyroms_toolbox/plot_mask.py +++ b/pyroms_toolbox/pyroms_toolbox/plot_mask.py @@ -15,7 +15,7 @@ def plot_mask(gridid, Cpos='rho', proj=None, **kwargs): grd = pyroms.grid.get_ROMS_grid(gridid) Cpos = str(Cpos) - print Cpos + print(Cpos) # get grid information if Cpos == 'rho': @@ -34,7 +34,7 @@ def plot_mask(gridid, Cpos='rho', proj=None, **kwargs): mask = grd.hgrid.mask_v else: - raise Warning, 'Cpos must be rho, u or v' + raise Warning('Cpos must be rho, u or v') # defined color map land_color = kwargs.pop('land_color', (0.6, 1.0, 0.6)) diff --git a/pyroms_toolbox/pyroms_toolbox/quiver.py b/pyroms_toolbox/pyroms_toolbox/quiver.py index 679de67..511a57c 100644 --- a/pyroms_toolbox/pyroms_toolbox/quiver.py +++ b/pyroms_toolbox/pyroms_toolbox/quiver.py @@ -124,10 +124,10 @@ def quiver(uvar, vvar, tindex, depth, gridid, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=100, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/remapping.py b/pyroms_toolbox/pyroms_toolbox/remapping.py index b7e5446..8bd8ccf 100644 --- a/pyroms_toolbox/pyroms_toolbox/remapping.py +++ b/pyroms_toolbox/pyroms_toolbox/remapping.py @@ -15,8 +15,6 @@ import matplotlib.pyplot as plt -import datetime - def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ rotate_uv=False, trange=None, irange=None, jrange=None, \ dstdir='./' ,zlevel=None, dmax=0, cdepth=0, kk=0, \ @@ -60,14 +58,14 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ varname = [varname] nvar = len(varname) else: - raise ValueError, 'varname must be a str or a list of str' + raise ValueError('varname must be a str or a list of str') # if we're working on u and v, we'll compute ubar,vbar afterwards compute_ubar = False if (varname.__contains__('u') == 1 and varname.__contains__('v') == 1) or \ (varname.__contains__('u_eastward') == 1 and varname.__contains__('v_northward') == 1): compute_ubar = True - print 'ubar/vbar to be computed from u/v' + print('ubar/vbar to be computed from u/v') if varname.__contains__('ubar'): varname.remove('ubar') nvar = nvar-1 @@ -78,23 +76,23 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ # if rotate_uv=True, check that u and v are in varname if rotate_uv is True: if varname.__contains__(uvar) == 0 or varname.__contains__(vvar) == 0: - raise Warning, 'varname must include uvar and vvar in order to' \ - + ' rotate the velocity field' + raise Warning('varname must include uvar and vvar in order to' \ + + ' rotate the velocity field') else: varname.remove(uvar) varname.remove(vvar) nvar = nvar-2 # srcfile argument - print 'files', srcfile + print('files', srcfile) if type(srcfile).__name__ == 'list': nfile = len(srcfile) elif type(srcfile).__name__ == 'str': srcfile = sorted(glob.glob(srcfile)) nfile = len(srcfile) else: - raise ValueError, 'src_srcfile must be a str or a list of str' - print 'number of files', nfile, srcfile + raise ValueError('src_srcfile must be a str or a list of str') + print('number of files', nfile, srcfile) # get wts_file if 
type(wts_files).__name__ == 'str': @@ -103,7 +101,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ nctidx = 0 # loop over the srcfile for nf in range(nfile): - print 'Working with file', srcfile[nf], '...' + print('Working with file', srcfile[nf], '...') # get time ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) @@ -111,14 +109,14 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ # trange argument if trange is None: - trange = range(ntime) + trange = list(range(ntime)) # create destination file if nctidx == 0: dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' \ + dstgrd.name + '.nc' if os.path.exists(dstfile) is False: - print 'Creating destination file', dstfile + print('Creating destination file', dstfile) pyroms_toolbox.nc_create_roms_file(dstfile, dstgrd, ocean_time) # open destination file @@ -131,10 +129,10 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ # loop over variable for nv in range(nvar): - print ' ' - print 'remapping', varname[nv], 'from', srcgrd.name, \ - 'to', dstgrd.name - print 'time =', ocean_time[nt] + print(' ') + print('remapping', varname[nv], 'from', srcgrd.name, \ + 'to', dstgrd.name) + print('time =', ocean_time[nt]) # get source data src_var = pyroms.utility.get_nc_var(varname[nv], srcfile[nf]) @@ -146,8 +144,8 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ try: spval = src_var._FillValue except: -# raise Warning, 'Did not find a _FillValue attribute.' - print 'Warning, Did not find a _FillValue attribute.' +# raise Warning, 'Did not find a _FillValue attribute.' + print('Warning, Did not find a _FillValue attribute.') spval = 1.e37 # irange @@ -172,17 +170,17 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ if src_var.dimensions[1].find('_w') != -1: Cpos='w' - print 'Arakawa C-grid position is', Cpos + print('Arakawa C-grid position is', Cpos) # create variable in _destination file if nctidx == 0: - print 'Creating variable', varname[nv] + print('Creating variable', varname[nv]) nc.createVariable(varname[nv], 'f8', src_var.dimensions, fill_value=spval) nc.variables[varname[nv]].long_name = src_var.long_name try: nc.variables[varname[nv]].units = src_var.units except: - print varname[nv]+' has no units' + print(varname[nv]+' has no units') nc.variables[varname[nv]].time = src_var.time nc.variables[varname[nv]].coordinates = \ src_var.coordinates @@ -195,18 +193,18 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ break else: if s == len(wts_files) - 1: - raise ValueError, 'Did not find the appropriate remap weights file' + raise ValueError('Did not find the appropriate remap weights file') if ndim == 3: # vertical interpolation from sigma to standard z level - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_varz = pyroms.remapping.roms2z( \ src_var[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ srcgrd, srcgrdz, Cpos=Cpos, spval=spval, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms.remapping.flood(src_varz, srcgrdz, Cpos=Cpos, \ irange=iirange, jrange=jjrange, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) @@ -214,32 +212,35 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ else: src_varz = src_var[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] -# print datetime.datetime.now() # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip 
weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) -# print datetime.datetime.now() if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_var = pyroms.remapping.z2roms(dst_varz, dstgrdz, dstgrd, \ Cpos=Cpos, spval=spval, flood=False) else: dst_var = dst_varz + if varname[nv] == 'u': + dst_u = dst_var + if varname[nv] == 'v': + dst_v = dst_var + # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables[varname[nv]][nctidx] = dst_var # rotate the velocity field if requested if rotate_uv is True: - print ' ' - print 'remapping and rotating', uvar, 'and', vvar, 'from', \ - srcgrd.name, 'to', dstgrd.name + print(' ') + print('remapping and rotating', uvar, 'and', vvar, 'from', \ + srcgrd.name, 'to', dstgrd.name) # get source data src_u = pyroms.utility.get_nc_var(uvar, srcfile[nf]) @@ -249,18 +250,18 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ try: spval = src_v._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' + raise Warning('Did not find a _FillValue attribute.') if rotate_part: ndim = len(src_u.dimensions)-1 ind = uvar.find('_eastward') uvar_out = uvar[0:ind] - print "Warning: renaming uvar to", uvar_out - print "uvar dims:", src_u.dimensions + print("Warning: renaming uvar to", uvar_out) +# print("uvar dims:", src_u.dimensions) ind = vvar.find('_northward') vvar_out = vvar[0:ind] - print "Warning: renaming vvar to", vvar_out - print "uvar dims:", src_v.dimensions + print("Warning: renaming vvar to", vvar_out) +# print("vvar dims:", src_v.dimensions) if ndim == 3: dimens_u = ['ocean_time', 's_rho', 'eta_u', 'xi_u'] dimens_v = ['ocean_time', 's_rho', 'eta_v', 'xi_v'] @@ -276,7 +277,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ # create variable in destination file if nctidx == 0: - print 'Creating variable '+uvar_out + print('Creating variable '+uvar_out) nc.createVariable(uvar_out, 'f8', dimens_u, fill_value=spval) nc.variables[uvar_out].long_name = src_u.long_name nc.variables[uvar_out].units = src_u.units @@ -284,7 +285,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ nc.variables[uvar_out].coordinates = \ str(dimens_u.reverse()) nc.variables[uvar_out].field = src_u.field - print 'Creating variable '+vvar_out + print('Creating variable '+vvar_out) nc.createVariable(vvar_out, 'f8', dimens_v, fill_value=spval) nc.variables[vvar_out].long_name = src_v.long_name nc.variables[vvar_out].units = src_v.units @@ -301,24 +302,28 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ wts_file_v = wts_files[s] Cpos_u = 'rho' Cpos_v = 'rho' + else: + for s in range(len(wts_files)): + if wts_files[s].__contains__('u_to_rho.nc'): + wts_file_u = wts_files[s] + if wts_files[s].__contains__('v_to_rho.nc'): + wts_file_v = wts_files[s] + Cpos_u = 'u' + Cpos_v = 'v' + + # get the right ranges + if rotate_part: # irange if irange is None: iirange = (0,src_u.shape[-1]) else: - iirange = iirange + iirange = irange # jrange if jrange is None: jjrange = (0,src_u.shape[-2]) else: jjrange = jrange else: - for s in range(len(wts_files)): - if wts_files[s].__contains__('u_to_rho.nc'): - wts_file_u = wts_files[s] - if wts_files[s].__contains__('v_to_rho.nc'): - wts_file_v = wts_files[s] - Cpos_u = 'u' - Cpos_v = 'v' # irange if 
irange is None: iirange = (0,src_u.shape[-1]) @@ -334,13 +339,13 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ ndim = len(src_v.dimensions)-1 if ndim == 3: - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_uz = pyroms.remapping.roms2z( \ src_u[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ srcgrd, srcgrdz, Cpos=Cpos_u, spval=spval, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the u grid' + print('flood the u grid') src_uz = pyroms.remapping.flood(src_uz, srcgrdz, Cpos=Cpos_u, \ irange=iirange, jrange=jjrange, \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) @@ -356,7 +361,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ if irange is None: iirange = (0,src_v.shape[-1]) else: - iirange = iirange + iirange = irange # jrange if jrange is None: jjrange = (0,src_v.shape[-2]) @@ -381,7 +386,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the v grid' + print('flood the v grid') src_vz = pyroms.remapping.flood(src_vz, srcgrdz, Cpos=Cpos_v, \ irange=iirange, jrange=jjrange, \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) @@ -392,7 +397,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ dmax=dmax) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file_u, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file_v, \ @@ -400,7 +405,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u = pyroms.remapping.z2roms(dst_uz, dstgrdz, dstgrd, \ Cpos='rho', spval=spval, flood=False) dst_v = pyroms.remapping.z2roms(dst_vz, dstgrdz, dstgrd, \ @@ -442,20 +447,20 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ for n in range(dstgrd.vgrid.N): dst_u[n,idxu[0], idxu[1]] = spval dst_v[n,idxv[0], idxv[1]] = spval - else: + else: dst_u = 0.5 * (dst_u[:,:-1] + dst_u[:,1:]) dst_v = 0.5 * (dst_v[:-1,:] + dst_v[1:,:]) dst_u[idxu[0], idxu[1]] = spval dst_v[idxv[0], idxv[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables[uvar_out][nctidx] = dst_u nc.variables[vvar_out][nctidx] = dst_v if compute_ubar: if nctidx == 0: - print 'Creating variable ubar' + print('Creating variable ubar') nc.createVariable('ubar', 'f8', \ ('ocean_time', 'eta_u', 'xi_u'), fill_value=spval) nc.variables['ubar'].long_name = '2D u-momentum component' @@ -463,7 +468,7 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ nc.variables['ubar'].time = 'ocean_time' nc.variables['ubar'].coordinates = 'xi_u eta_u ocean_time' nc.variables['ubar'].field = 'ubar-velocity,, scalar, series' - print 'Creating variable vbar' + print('Creating variable vbar') nc.createVariable('vbar', 'f8', \ ('ocean_time', 'eta_v', 'xi_v'), fill_value=spval) nc.variables['vbar'].long_name = '2D v-momentum component' @@ -502,9 +507,9 @@ def remapping(varname, srcfile, wts_files, srcgrd, dstgrd, \ nc.variables['vbar'][nctidx] = dst_vbar nctidx = nctidx + 1 - print 'ADDING to nctidx ', nctidx + print('ADDING to nctidx ', nctidx) nc.sync() - + # close destination file 
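The ubar/vbar variables created by remapping above (and by remapping_bound below) hold ROMS' depth-averaged 2D momentum, i.e. a thickness-weighted vertical mean of u and v with the thicknesses taken between w levels. A tiny sketch with made-up numbers for a single water column, assuming z_w holds the N+1 w-level depths:

    import numpy as np

    z_w = np.array([-10., -6., -3., -1., 0.])   # hypothetical w-level depths (N+1 values)
    u = np.array([0.05, 0.10, 0.20, 0.30])      # hypothetical u at the N rho levels
    dz = np.diff(z_w)                           # layer thicknesses: [4., 3., 2., 1.]
    ubar = (u * dz).sum() / dz.sum()            # depth-averaged velocity, here 0.12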
nc.close() diff --git a/pyroms_toolbox/pyroms_toolbox/remapping_bound.py b/pyroms_toolbox/pyroms_toolbox/remapping_bound.py index 43e4f45..d81ccfa 100644 --- a/pyroms_toolbox/pyroms_toolbox/remapping_bound.py +++ b/pyroms_toolbox/pyroms_toolbox/remapping_bound.py @@ -60,14 +60,14 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ varname = [varname] nvar = len(varname) else: - raise ValueError, 'varname must be a str or a list of str' + raise ValueError('varname must be a str or a list of str') # if we're working on u and v, we'll compute ubar,vbar afterwards compute_ubar = False if (varname.__contains__('u') == 1 and varname.__contains__('v') == 1) or \ (varname.__contains__('u_eastward') == 1 and varname.__contains__('v_northward') == 1): compute_ubar = True - print 'ubar/vbar to be computed from u/v' + print('ubar/vbar to be computed from u/v') if varname.__contains__('ubar'): varname.remove('ubar') nvar = nvar-1 @@ -78,8 +78,8 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # if rotate_uv=True, check that u and v are in varname if rotate_uv is True: if varname.__contains__(uvar) == 0 or varname.__contains__(vvar) == 0: - raise Warning, 'varname must include uvar and vvar in order to' \ - + ' rotate the velocity field' + raise Warning('varname must include uvar and vvar in order to' \ + + ' rotate the velocity field') else: varname.remove(uvar) varname.remove(vvar) @@ -92,7 +92,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ srcfile = sorted(glob.glob(srcfile)) nfile = len(srcfile) else: - raise ValueError, 'src_srcfile must be a str or a list of str' + raise ValueError('src_srcfile must be a str or a list of str') # get wts_file if type(wts_files).__name__ == 'str': @@ -107,7 +107,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nctidx = 0 # loop over the srcfile for nf in range(nfile): - print 'Working with file', srcfile[nf], '...' + print('Working with file', srcfile[nf], '...') # get time ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) @@ -115,14 +115,14 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # trange argument if trange is None: - trange = range(ntime) + trange = list(range(ntime)) # create destination file if nctidx == 0: dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' \ + dst_grd.name + '_bdry.nc' if os.path.exists(dstfile) is False: - print 'Creating destination file', dstfile + print('Creating destination file', dstfile) pyroms_toolbox.nc_create_roms_file(dstfile, dst_grd, \ ocean_time, lgrid=False) @@ -136,10 +136,10 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # loop over variable for nv in range(nvar): - print ' ' - print 'remapping', varname[nv], 'from', srcgrd.name, \ - 'to', dst_grd.name - print 'time =', ocean_time[nt] + print(' ') + print('remapping', varname[nv], 'from', srcgrd.name, \ + 'to', dst_grd.name) + print('time =', ocean_time[nt]) Mp, Lp = dst_grd.hgrid.mask_rho.shape # get source data @@ -152,7 +152,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: spval = src_var._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' 
+ raise Warning('Did not find a _FillValue attribute.') # irange if irange is None: @@ -182,7 +182,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ if src_var.dimensions[1].find('_w') != -1: Cpos='w' - print 'Arakawa C-grid position is', Cpos + print('Arakawa C-grid position is', Cpos) # create variable in _destination file if nctidx == 0: @@ -192,7 +192,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ for dim in dimens: if re.match(dimexcl[sid],dim): dimens.remove(dim) - print 'Creating variable', varn, dimens + print('Creating variable', varn, dimens) nc.createVariable(varn, 'f8', dimens, \ fill_value=spval) nc.variables[varn].long_name = varname[nv] + \ @@ -200,7 +200,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: nc.variables[varn].units = src_var.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_var.time nc.variables[varn].coordinates = \ str(dimens.reverse()) @@ -213,18 +213,18 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ break else: if s == len(wts_files) - 1: - raise ValueError, 'Did not find the appropriate remap weights file' + raise ValueError('Did not find the appropriate remap weights file') if ndim == 3: # vertical interpolation from sigma to standard z level - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_varz = pyroms.remapping.roms2z( \ src_var[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ srcgrd, srcgrdz, Cpos=Cpos, spval=spval, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the grid' + print('flood the grid') src_varz = pyroms.remapping.flood(src_varz, srcgrdz, Cpos=Cpos, \ irange=iirange, jrange=jjrange, spval=spval, \ dmax=dmax, cdepth=cdepth, kk=kk) @@ -232,9 +232,9 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ else: src_varz = src_var[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] - print datetime.datetime.now() + print(datetime.datetime.now()) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_varz = pyroms.remapping.remap(src_varz, wts_file, \ spval=spval) @@ -275,7 +275,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # print datetime.datetime.now() # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = varname[nv]+str(sid) nc.variables[varn][nctidx] = np.squeeze(dst_var_west) @@ -294,9 +294,9 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # rotate the velocity field if requested if rotate_uv is True: - print ' ' - print 'remapping and rotating u and v from', srcgrd.name, \ - 'to', dst_grd.name + print(' ') + print('remapping and rotating u and v from', srcgrd.name, \ + 'to', dst_grd.name) # get source data src_u = pyroms.utility.get_nc_var(uvar, srcfile[nf]) @@ -306,16 +306,16 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: spval = src_v._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' 
+ raise Warning('Did not find a _FillValue attribute.') if rotate_part: ndim = len(src_u.dimensions)-1 ind = uvar.find('_eastward') uvar_out = uvar[0:ind] - print "Warning: renaming uvar to", uvar_out + print("Warning: renaming uvar to", uvar_out) ind = vvar.find('_northward') vvar_out = vvar[0:ind] - print "Warning: renaming vvar to", vvar_out + print("Warning: renaming vvar to", vvar_out) if ndim == 3: dimens_u = ['ocean_time', 's_rho', 'eta_u', 'xi_u'] dimens_v = ['ocean_time', 's_rho', 'eta_v', 'xi_v'] @@ -331,10 +331,10 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # create variable in destination file if nctidx == 0: - print 'Creating boundary variables for '+uvar + print('Creating boundary variables for '+uvar) for sid in sides: varn = uvar_out+str(sid) - print 'Creating variable', varn + print('Creating variable', varn) dimens = list(dimens_u) for dim in dimens: if re.match(dimexcl[sid],dim): @@ -346,15 +346,15 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: nc.variables[varn].units = src_u.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_u.time nc.variables[varn].coordinates = \ str(dimens.reverse()) nc.variables[varn].field = src_u.field - print 'Creating boundary variables for '+vvar + print('Creating boundary variables for '+vvar) for sid in sides: varn = vvar_out+str(sid) - print 'Creating variable', varn + print('Creating variable', varn) dimens = list(dimens_v) for dim in dimens: if re.match(dimexcl[sid],dim): @@ -366,7 +366,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: nc.variables[varn].units = src_v.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_v.time nc.variables[varn].coordinates = \ str(dimens.reverse()) @@ -380,6 +380,16 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ wts_file_v = wts_files[s] Cpos_u = 'rho' Cpos_v = 'rho' + else: + for s in range(len(wts_files)): + if wts_files[s].__contains__('u_to_rho.nc'): + wts_file_u = wts_files[s] + if wts_files[s].__contains__('v_to_rho.nc'): + wts_file_v = wts_files[s] + Cpos_u = 'u' + Cpos_v = 'v' + + if rotate_part: # irange if irange is None: iirange = (0,src_u.shape[-1]) @@ -391,13 +401,6 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ else: jjrange = jrange else: - for s in range(len(wts_files)): - if wts_files[s].__contains__('u_to_rho.nc'): - wts_file_u = wts_files[s] - if wts_files[s].__contains__('v_to_rho.nc'): - wts_file_v = wts_files[s] - Cpos_u = 'u' - Cpos_v = 'v' # irange if irange is None: iirange = (0,src_u.shape[-1]) @@ -410,16 +413,15 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ jjrange = jrange # vertical interpolation from sigma to standard z level - ndim = len(src_v.dimensions)-1 if ndim == 3: - print 'vertical interpolation from sigma to standard z level' + print('vertical interpolation from sigma to standard z level') src_uz = pyroms.remapping.roms2z( \ src_u[nt,:,jjrange[0]:jjrange[1],iirange[0]:iirange[1]], \ srcgrd, srcgrdz, Cpos=Cpos_u, spval=spval, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the u grid' + print('flood the u grid') src_uz = pyroms.remapping.flood(src_uz, srcgrdz, Cpos=Cpos_u, \ irange=iirange, jrange=jjrange, \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) @@ -429,7 +431,6 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ irange=iirange, jrange=jjrange, spval=spval, \ 
dmax=dmax) - # get the right ranges if rotate_part: # irange if irange is None: @@ -460,7 +461,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ irange=iirange, jrange=jjrange) # flood the grid - print 'flood the v grid' + print('flood the v grid') src_vz = pyroms.remapping.flood(src_vz, srcgrdz, Cpos=Cpos_v, \ irange=iirange, jrange=jjrange, \ spval=spval, dmax=dmax, cdepth=cdepth, kk=kk) @@ -471,7 +472,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ dmax=dmax) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_uz = pyroms.remapping.remap(src_uz, wts_file_u, \ spval=spval) dst_vz = pyroms.remapping.remap(src_vz, wts_file_v, \ @@ -480,7 +481,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ if ndim == 3: # vertical interpolation from standard z level to sigma - print 'vertical interpolation from standard z level to sigma' + print('vertical interpolation from standard z level to sigma') dst_u_north = pyroms.remapping.z2roms(dst_uz[:, Mp-2:Mp, 0:Lp], \ dst_grdz, dst_grd, Cpos='rho', spval=spval, \ flood=False, irange=(0,Lp), jrange=(Mp-2,Mp)) @@ -630,7 +631,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ dst_v_west[idxv_west[0]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = uvar_out+str(sid) nc.variables[varn][nctidx] = dst_u_west @@ -657,7 +658,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ if compute_ubar: if nctidx == 0: - print 'Creating variable ubar_north' + print('Creating variable ubar_north') nc.createVariable('ubar_north', 'f8', \ ('ocean_time', 'xi_u'), fill_value=spval) nc.variables['ubar_north'].long_name = \ @@ -666,7 +667,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['ubar_north'].time = 'ocean_time' nc.variables['ubar_north'].coordinates = 'xi_u ocean_time' nc.variables['ubar_north'].field = 'ubar_north, scalar, series' - print 'Creating variable vbar_north' + print('Creating variable vbar_north') nc.createVariable('vbar_north', 'f8', \ ('ocean_time', 'xi_v'), fill_value=spval) nc.variables['vbar_north'].long_name = \ @@ -676,7 +677,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['vbar_north'].coordinates = 'xi_v ocean_time' nc.variables['vbar_north'].field = 'vbar_north,, scalar, series' - print 'Creating variable ubar_south' + print('Creating variable ubar_south') nc.createVariable('ubar_south', 'f8', \ ('ocean_time', 'xi_u'), fill_value=spval) nc.variables['ubar_south'].long_name = \ @@ -685,7 +686,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['ubar_south'].time = 'ocean_time' nc.variables['ubar_south'].coordinates = 'xi_u ocean_time' nc.variables['ubar_south'].field = 'ubar_south, scalar, series' - print 'Creating variable vbar_south' + print('Creating variable vbar_south') nc.createVariable('vbar_south', 'f8', \ ('ocean_time', 'xi_v'), fill_value=spval) nc.variables['vbar_south'].long_name = \ @@ -694,7 +695,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['vbar_south'].time = 'ocean_time' nc.variables['vbar_south'].coordinates = 'xi_v ocean_time' - print 'Creating variable ubar_west' + print('Creating variable ubar_west') nc.createVariable('ubar_west', 'f8', \ ('ocean_time', 'eta_u'), 
fill_value=spval) nc.variables['ubar_west'].long_name = \ @@ -703,7 +704,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['ubar_west'].time = 'ocean_time' nc.variables['ubar_west'].coordinates = 'eta_u ocean_time' nc.variables['ubar_west'].field = 'ubar_west, scalar, series' - print 'Creating variable vbar_west' + print('Creating variable vbar_west') nc.createVariable('vbar_west', 'f8', \ ('ocean_time', 'eta_v'), fill_value=spval) nc.variables['vbar_west'].long_name = \ @@ -712,7 +713,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['vbar_west'].time = 'ocean_time' nc.variables['vbar_west'].coordinates = 'eta_v ocean_time' - print 'Creating variable ubar_east' + print('Creating variable ubar_east') nc.createVariable('ubar_east', 'f8', \ ('ocean_time', 'eta_u'), fill_value=spval) nc.variables['ubar_east'].long_name = \ @@ -721,7 +722,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['ubar_east'].time = 'ocean_time' nc.variables['ubar_east'].coordinates = 'eta_u ocean_time' nc.variables['ubar_east'].field = 'ubar_east, scalar, series' - print 'Creating variable vbar_east' + print('Creating variable vbar_east') nc.createVariable('vbar_east', 'f8', \ ('ocean_time', 'eta_v'), fill_value=spval) nc.variables['vbar_east'].long_name = \ @@ -732,7 +733,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ # compute depth average velocity ubar and vbar # get z at the right position - print 'Computing ubar/vbar from u/v' + print('Computing ubar/vbar from u/v') z_u_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:-1] + dst_grd.vgrid.z_w[0,:,-1, 1:]) z_v_north = 0.5 * (dst_grd.vgrid.z_w[0,:,-1,:] + @@ -820,7 +821,7 @@ def remapping_bound(varname, srcfile, wts_files, srcgrd, dst_grd, \ nc.variables['vbar_west'][nctidx] = dst_vbar_west nctidx = nctidx + 1 - print 'ADDING to nctidx ', nctidx + print('ADDING to nctidx ', nctidx) nc.sync() # close files here? how? diff --git a/pyroms_toolbox/pyroms_toolbox/remapping_bound_sig.py b/pyroms_toolbox/pyroms_toolbox/remapping_bound_sig.py index 5282784..058fd3c 100644 --- a/pyroms_toolbox/pyroms_toolbox/remapping_bound_sig.py +++ b/pyroms_toolbox/pyroms_toolbox/remapping_bound_sig.py @@ -43,7 +43,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ varname = [varname] nvar = len(varname) else: - raise ValueError, 'varname must be a str or a list of str' + raise ValueError('varname must be a str or a list of str') # srcfile argument if type(srcfile).__name__ == 'list': @@ -52,12 +52,12 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ srcfile = sorted(glob.glob(srcfile)) nfile = len(srcfile) else: - raise ValueError, 'src_srcfile must be a str or a list of str' + raise ValueError('src_srcfile must be a str or a list of str') # get wts_file if type(wts_files).__name__ == 'str': wts_files = sorted(glob.glob(wts_files)) - + sides = ['_west','_east','_north','_south'] long = {'_west':'Western', '_east':'Eastern', \ '_north':'Northern', '_south':'Southern'} @@ -67,22 +67,22 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ nctidx = 0 # loop over the srcfile for nf in range(nfile): - print 'Working with file', srcfile[nf], '...' 
+ print('Working with file', srcfile[nf], '...') - # get time + # get time ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) ntime = len(ocean_time[:]) # trange argument if trange is None: - trange = range(ntime) + trange = list(range(ntime)) # create destination file if nctidx == 0: dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' \ + dst_grd.name + '_bdry.nc' if os.path.exists(dstfile) is False: - print 'Creating destination file', dstfile + print('Creating destination file', dstfile) pyroms_toolbox.nc_create_roms_file(dstfile, dst_grd, \ ocean_time, lgrid=False) @@ -96,10 +96,10 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ # loop over variable for nv in range(nvar): - print ' ' - print 'remapping', varname[nv], 'from', srcgrd.name, \ - 'to', dst_grd.name - print 'time =', ocean_time[nt] + print(' ') + print('remapping', varname[nv], 'from', srcgrd.name, \ + 'to', dst_grd.name) + print('time =', ocean_time[nt]) Mp, Lp = dst_grd.hgrid.mask_rho.shape # get source data @@ -109,7 +109,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: spval = src_var._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' + raise Warning('Did not find a _FillValue attribute.') # irange if irange is None: @@ -127,15 +127,15 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ if src_var.dimensions[2].find('_rho') != -1: Cpos='rho' else: - print "Sigma should be on rho points" + print("Sigma should be on rho points") - print 'Arakawa C-grid position is', Cpos + print('Arakawa C-grid position is', Cpos) # create variable in _destination file if nctidx == 0: for sid in sides: varn = varname[nv]+str(sid) - print 'Creating variable', varn + print('Creating variable', varn) dimens = [i for i in src_var.dimensions] for dim in dimens: if re.match(dimexcl[sid],dim): @@ -147,7 +147,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: nc.variables[varn].units = src_var.units except: - print varn+' has no units' + print(varn+' has no units') nc.variables[varn].time = src_var.time nc.variables[varn].coordinates = \ str(dimens.reverse()) @@ -160,7 +160,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ break else: if s == len(wts_files) - 1: - raise ValueError, 'Did not find the appropriate remap weights file' + raise ValueError('Did not find the appropriate remap weights file') # print datetime.datetime.now() # horizontal interpolation using scrip weights @@ -175,7 +175,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ dst_var_west = dst_var[:, 0] # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = varname[nv]+str(sid) nc.variables[varn][nctidx] = np.squeeze(dst_var_west) @@ -194,9 +194,9 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ # rotate the velocity field if requested if rotate_sig: - print ' ' - print 'remapping and rotating sigma from', srcgrd.name, \ - 'to', dst_grd.name + print(' ') + print('remapping and rotating sigma from', srcgrd.name, \ + 'to', dst_grd.name) # get source data src_11 = pyroms.utility.get_nc_var(varname[0], srcfile[nf]) @@ -204,7 +204,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ try: spval = src_11._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' 
+ raise Warning('Did not find a _FillValue attribute.') src_11 = src_11[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] @@ -216,7 +216,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_11 = pyroms.remapping.remap(src_11, wts_file, \ spval=spval) dst_22 = pyroms.remapping.remap(src_22, wts_file, \ @@ -253,7 +253,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ sin_ang = np.sin(angle) Lp = cos_ang.shape[-1] Mp = cos_ang.shape[-2] - print "Lp, Mp", Lp, Mp + print("Lp, Mp", Lp, Mp) if rotate_sig: # North @@ -329,7 +329,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ dst_12_west[idx_west[0]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') sid = '_west' varn = 'sig11'+str(sid) nc.variables[varn][nctidx] = dst_11_west @@ -365,7 +365,7 @@ def remapping_bound_sig(varname, srcfile, wts_files, srcgrd, dst_grd, \ nctidx = nctidx + 1 nc.sync() # close files here? how? - + # close destination file nc.close() diff --git a/pyroms_toolbox/pyroms_toolbox/remapping_tensor.py b/pyroms_toolbox/pyroms_toolbox/remapping_tensor.py index 654ce68..03dda27 100644 --- a/pyroms_toolbox/pyroms_toolbox/remapping_tensor.py +++ b/pyroms_toolbox/pyroms_toolbox/remapping_tensor.py @@ -42,7 +42,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ varname = [varname] nvar = len(varname) else: - raise ValueError, 'varname must be a str or a list of str' + raise ValueError('varname must be a str or a list of str') # srcfile argument if type(srcfile).__name__ == 'list': @@ -51,7 +51,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ srcfile = sorted(glob.glob(srcfile)) nfile = len(srcfile) else: - raise ValueError, 'src_srcfile must be a str or a list of str' + raise ValueError('src_srcfile must be a str or a list of str') # get wts_file if type(wts_files).__name__ == 'str': @@ -59,7 +59,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ # loop over the srcfile for nf in range(nfile): - print 'Working with file', srcfile[nf], '...' 
+ print('Working with file', srcfile[nf], '...') # get time ocean_time = pyroms.utility.get_nc_var('ocean_time', srcfile[nf]) @@ -67,12 +67,12 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ # trange argument if trange is None: - trange = range(ntime) + trange = list(range(ntime)) # create destination file dstfile = dstdir + os.path.basename(srcfile[nf])[:-3] + '_' + dstgrd.name + '.nc' if os.path.exists(dstfile) is False: - print 'Creating destination file', dstfile + print('Creating destination file', dstfile) pyroms_toolbox.nc_create_roms_file(dstfile, dstgrd, ocean_time) # open destination file @@ -86,10 +86,10 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ # loop over variable for nv in range(nvar): - print ' ' - print 'remapping', varname[nv], 'from', srcgrd.name, \ - 'to', dstgrd.name - print 'time =', ocean_time[nt] + print(' ') + print('remapping', varname[nv], 'from', srcgrd.name, \ + 'to', dstgrd.name) + print('time =', ocean_time[nt]) # get source data src_var = pyroms.utility.get_nc_var(varname[nv], srcfile[nf]) @@ -98,7 +98,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ try: spval = src_var._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' + raise Warning('Did not find a _FillValue attribute.') # irange if irange is None: @@ -116,19 +116,19 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ if src_var.dimensions[2].find('_rho') != -1: Cpos='rho' else: - print "Sigma should be on rho points" + print("Sigma should be on rho points") - print 'Arakawa C-grid position is', Cpos + print('Arakawa C-grid position is', Cpos) # create variable in _destination file if nt == trange[0]: - print 'Creating variable', varname[nv] + print('Creating variable', varname[nv]) nc.createVariable(varname[nv], 'f8', src_var.dimensions, fill_value=spval) nc.variables[varname[nv]].long_name = src_var.long_name try: nc.variables[varname[nv]].units = src_var.units except: - print varname[nv]+' has no units' + print(varname[nv]+' has no units') nc.variables[varname[nv]].time = src_var.time nc.variables[varname[nv]].coordinates = \ src_var.coordinates @@ -142,7 +142,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ break else: if s == len(wts_files) - 1: - raise ValueError, 'Did not find the appropriate remap weights file' + raise ValueError('Did not find the appropriate remap weights file') # write data in destination file @@ -151,9 +151,9 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ # rotate the velocity field if requested # print datetime.datetime.now() - print ' ' - print 'remapping and rotating sigma from', srcgrd.name, \ - 'to', dstgrd.name + print(' ') + print('remapping and rotating sigma from', srcgrd.name, \ + 'to', dstgrd.name) # get source data src_11 = pyroms.utility.get_nc_var(varname[0], srcfile[nf]) @@ -161,7 +161,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ try: spval = src_11._FillValue except: - raise Warning, 'Did not find a _FillValue attribute.' 
+ raise Warning('Did not find a _FillValue attribute.') src_11 = src_11[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] @@ -171,22 +171,22 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ src_12 = pyroms.utility.get_nc_var(varname[2], srcfile[nf]) src_12 = src_12[nt,jjrange[0]:jjrange[1],iirange[0]:iirange[1]] - print "before", src_11[-1,30], src_12[-1,30], src_22[-1,30] + print("before", src_11[-1,30], src_12[-1,30], src_22[-1,30]) if shapiro: src_11 = pyroms_toolbox.shapiro_filter.shapiro2(src_11,2) src_22 = pyroms_toolbox.shapiro_filter.shapiro2(src_22,2) src_12 = pyroms_toolbox.shapiro_filter.shapiro2(src_12,2) - print "after", src_11[-1,30], src_12[-1,30], src_22[-1,30] + print("after", src_11[-1,30], src_12[-1,30], src_22[-1,30]) # horizontal interpolation using scrip weights - print 'horizontal interpolation using scrip weights' + print('horizontal interpolation using scrip weights') dst_11 = pyroms.remapping.remap(src_11, wts_file, \ spval=spval) dst_22 = pyroms.remapping.remap(src_22, wts_file, \ spval=spval) dst_12 = pyroms.remapping.remap(src_12, wts_file, \ spval=spval) - print "after remapping", dst_11[-1,30], dst_12[-1,30], dst_22[-1,30] + print("after remapping", dst_11[-1,30], dst_12[-1,30], dst_22[-1,30]) if rotate_sig is True: # rotate stress tensor @@ -198,7 +198,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ sin_ang = np.sin(angle) Lp = cos_ang.shape[-1] Mp = cos_ang.shape[-2] - print "Lp, Mp", Lp, Mp + print("Lp, Mp", Lp, Mp) for j in range(Mp): for i in range(Lp): @@ -216,7 +216,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ dst_11[j,i] = sig_rot[0,0] dst_12[j,i] = sig_rot[0,1] dst_22[j,i] = sig_rot[1,1] - print "after rotating", dst_11[-1,30], dst_12[-1,30], dst_22[-1,30] + print("after rotating", dst_11[-1,30], dst_12[-1,30], dst_22[-1,30]) # spval @@ -226,7 +226,7 @@ def remapping_tensor(varname, srcfile, wts_files, srcgrd, dstgrd, \ dst_22[idx[0], idx[1]] = spval # write data in destination file - print 'write data in destination file' + print('write data in destination file') nc.variables['sig11'][nctidx] = dst_11 nc.variables['sig12'][nctidx] = dst_12 nc.variables['sig22'][nctidx] = dst_22 diff --git a/pyroms_toolbox/pyroms_toolbox/rvalue.py b/pyroms_toolbox/pyroms_toolbox/rvalue.py index 9f7e92d..036bd6f 100644 --- a/pyroms_toolbox/pyroms_toolbox/rvalue.py +++ b/pyroms_toolbox/pyroms_toolbox/rvalue.py @@ -15,10 +15,10 @@ def rvalue(h): """ #check that h is 2D if (len(h.squeeze().shape)!=2): - raise ValueError, 'h must be two dimensions' + raise ValueError('h must be two dimensions') #check whether h contains any NaNs - if np.isnan(h).any(): raise Warning, 'the height array contains NaNs' + if np.isnan(h).any(): raise Warning('the height array contains NaNs') #compute diff(h)/2*mean(h) at each velocity grid point dhdx_u = np.diff(h, axis=1) diff --git a/pyroms_toolbox/pyroms_toolbox/rx0.py b/pyroms_toolbox/pyroms_toolbox/rx0.py index 530205e..d0846b7 100644 --- a/pyroms_toolbox/pyroms_toolbox/rx0.py +++ b/pyroms_toolbox/pyroms_toolbox/rx0.py @@ -50,10 +50,10 @@ def rx0(h,rmask): ravg = rx0.mean() rmed = np.median(rx0) - print ' ' - print 'Minimum r-value = ', rmin - print 'Maximum r-value = ', rmax - print 'Mean r-value = ', ravg - print 'Median r-value = ', rmed + print(' ') + print('Minimum r-value = ', rmin) + print('Maximum r-value = ', rmax) + print('Mean r-value = ', ravg) + print('Median r-value = ', rmed) return rx0 diff --git a/pyroms_toolbox/pyroms_toolbox/rx1.py 
b/pyroms_toolbox/pyroms_toolbox/rx1.py index 84e28fa..db8a221 100644 --- a/pyroms_toolbox/pyroms_toolbox/rx1.py +++ b/pyroms_toolbox/pyroms_toolbox/rx1.py @@ -55,10 +55,10 @@ def rx1(z_w,rmask): ravg = rx1.mean() rmed = np.median(rx1) - print ' ' - print 'Minimum r-value = ', rmin - print 'Maximum r-value = ', rmax - print 'Mean r-value = ', ravg - print 'Median r-value = ', rmed + print(' ') + print('Minimum r-value = ', rmin) + print('Maximum r-value = ', rmax) + print('Mean r-value = ', ravg) + print('Median r-value = ', rmed) return rx1 diff --git a/pyroms_toolbox/pyroms_toolbox/sandbox/roms_movie.py b/pyroms_toolbox/pyroms_toolbox/sandbox/roms_movie.py index 18ad0c2..c4284ce 100644 --- a/pyroms_toolbox/pyroms_toolbox/sandbox/roms_movie.py +++ b/pyroms_toolbox/pyroms_toolbox/sandbox/roms_movie.py @@ -66,7 +66,7 @@ def make_movie(filelst, varname, cmin, cmax, view, lev=0, istart=None, iend=None cmax = float(cmax) if imode is 'off': - print 'Turn interactive mode off' + print('Turn interactive mode off') plt.ioff() for tindex in range(istart, iend, 1): @@ -83,7 +83,7 @@ def make_movie(filelst, varname, cmin, cmax, view, lev=0, istart=None, iend=None title=title, outfile='plot.png') else: - print 'Option not available. view must be set to sview, zview or view2D' + print('Option not available. view must be set to sview, zview or view2D') outfile = str('%05d' % tindex) + '.png' @@ -103,13 +103,13 @@ def make_movie(filelst, varname, cmin, cmax, view, lev=0, istart=None, iend=None '-o', 'output.avi') - print "\n\nabout to execute:\n%s\n\n" % ' '.join(command) + print("\n\nabout to execute:\n%s\n\n" % ' '.join(command)) subprocess.check_call(command) - print "\n\n The movie was written to 'output.avi'" + print("\n\n The movie was written to 'output.avi'") if imode is 'off': - print 'Turn interactive mode on again' + print('Turn interactive mode on again') plt.ion() if clean is True: @@ -146,7 +146,7 @@ def make_big_movie(filelst, varname, cmin, cmax, Cpos, view, lev=0, grd=None, \ nfile = len(filelst) if imode is 'off': - print 'Turn interactive mode off' + print('Turn interactive mode off') plt.ioff() counter = 0 @@ -174,7 +174,7 @@ def make_big_movie(filelst, varname, cmin, cmax, Cpos, view, lev=0, grd=None, \ title=title, outfile='plot.png') else: - print 'Option not available. view must be set to sview, zview or view2D' + print('Option not available. 
view must be set to sview, zview or view2D') Tindex = counter + tindex @@ -199,13 +199,13 @@ def make_big_movie(filelst, varname, cmin, cmax, Cpos, view, lev=0, grd=None, \ 'output.avi') if imode is 'off': - print 'Turn interactive mode on again' + print('Turn interactive mode on again') plt.ion() - print "\n\nabout to execute:\n%s\n\n" % ' '.join(command) + print("\n\nabout to execute:\n%s\n\n" % ' '.join(command)) subprocess.check_call(command) - print "\n\n The movie was written to 'output.avi'" + print("\n\n The movie was written to 'output.avi'") if clean is True: for tindex in range(counter): diff --git a/pyroms_toolbox/pyroms_toolbox/seawater/__init__.py b/pyroms_toolbox/pyroms_toolbox/seawater/__init__.py index 194bf94..98e61a0 100644 --- a/pyroms_toolbox/pyroms_toolbox/seawater/__init__.py +++ b/pyroms_toolbox/pyroms_toolbox/seawater/__init__.py @@ -56,9 +56,9 @@ # --- Exceptions --- class OutOfRangeError(Exception): pass -from density import dens, svan, sigma, drhodt, alpha, drhods, beta -from salinity import salt, cond -from heat import heatcap, adtgrad, temppot, temppot0 -from misc import freezept, soundvel, depth +from .density import dens, svan, sigma, drhodt, alpha, drhods, beta +from .salinity import salt, cond +from .heat import heatcap, adtgrad, temppot, temppot0 +from .misc import freezept, soundvel, depth diff --git a/pyroms_toolbox/pyroms_toolbox/seawater/heat.py b/pyroms_toolbox/pyroms_toolbox/seawater/heat.py index 596d9ae..26c3fd3 100644 --- a/pyroms_toolbox/pyroms_toolbox/seawater/heat.py +++ b/pyroms_toolbox/pyroms_toolbox/seawater/heat.py @@ -101,9 +101,9 @@ def heatcap(S, T, P=0): CP2 = ((d0 + d1*T + d2*T**2 + d3*T**3 + d4*T**4)*S \ + (e0 + e1*T + e2*T**2)*S3_2)*P \ - + ((f0 + f1*T + f2*T**2 + f3*T**3)*S \ - + g0*S3_2)*P**2 \ - + ((h0 + h1*T + h2*T**2)*S + j1*T*S3_2)*P**3 + + ((f0 + f1*T + f2*T**2 + f3*T**3)*S \ + + g0*S3_2)*P**2 \ + + ((h0 + h1*T + h2*T**2)*S + j1*T*S3_2)*P**3 return CP0 + CP1 + CP2 @@ -150,7 +150,7 @@ def adtgrad(S, T, P=0): return a0 + (a1 + (a2 + a3*T)*T)*T \ + (b0 + b1*T)*(S-35) \ - + ( (c0 + (c1 + (c2 + c3*T)*T)*T) \ + + ( (c0 + (c1 + (c2 + c3*T)*T)*T) \ + (d0 + d1*T)*(S-35) )*P \ + (e0 + (e1 + e2*T)*T )*P*P diff --git a/pyroms_toolbox/pyroms_toolbox/seawater/salinity.py b/pyroms_toolbox/pyroms_toolbox/seawater/salinity.py index 6db6bb6..06070fa 100644 --- a/pyroms_toolbox/pyroms_toolbox/seawater/salinity.py +++ b/pyroms_toolbox/pyroms_toolbox/seawater/salinity.py @@ -149,7 +149,7 @@ def cond(S, T, P): RT = (S/35.0)**0.5 SI = _sal(RT,DT) # Iteration - for n in xrange(100): + for n in range(100): RT = RT + (S-SI)/_dsal(RT,DT) SI = _sal(RT,DT) try: diff --git a/pyroms_toolbox/pyroms_toolbox/seawater/test.py b/pyroms_toolbox/pyroms_toolbox/seawater/test.py index b91cc20..9ef3235 100644 --- a/pyroms_toolbox/pyroms_toolbox/seawater/test.py +++ b/pyroms_toolbox/pyroms_toolbox/seawater/test.py @@ -1,95 +1,95 @@ - + ### TEST-CODE ##################################### if __name__ == '__main__': format1 = "Computed: %25s = " format2 = "Check value = " - - print + + print() # Check value from UNESCO 1983, p. 20 - print "Checking svan" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "svan(40, 40, 10000)", svan(40, 40, 10000) - print format2, "981.30210E-8" + print("Checking svan") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "svan(40, 40, 10000)", svan(40, 40, 10000)) + print(format2, "981.30210E-8") - print + print() # Check value from UNESCO 1983, p. 
20 - print "Checking sigma" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "sigma(40, 40, 10000)", sigma(40, 40, 10000) - print format2, 59.82037 + print("Checking sigma") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "sigma(40, 40, 10000)", sigma(40, 40, 10000)) + print(format2, 59.82037) - print + print() # Check value from UNESCO 1983, p. 11 - print "Checking salt" - print - print "Salinity = 40.0000" - print "cond = 1.888091, T = 40 °C, P = 10000 dbar" - print format1 % "salt(1.888091, 40, 10000)", salt(1.888091, 40, 10000) - print format2, 40.0000 - - print + print("Checking salt") + print() + print("Salinity = 40.0000") + print("cond = 1.888091, T = 40 C, P = 10000 dbar") + print(format1 % "salt(1.888091, 40, 10000)", salt(1.888091, 40, 10000)) + print(format2, 40.0000) + + print() # Check value from UNESCO 1983, p. 11 - print "Checking cond" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "cond(40, 40, 10000)", cond(40, 40, 10000) - print format2, 1.888091 + print("Checking cond") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "cond(40, 40, 10000)", cond(40, 40, 10000)) + print(format2, 1.888091) - print + print() # Check value from UNESCO 1983, p. 35 - print "Checking heatcap" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "heatcap(40, 40, 10000)", heatcap(40, 40, 10000) - print format2, "3849.500" + print("Checking heatcap") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "heatcap(40, 40, 10000)", heatcap(40, 40, 10000)) + print(format2, "3849.500") - print + print() # Check value from UNESCO 1983, p. 36 - print "Checking adtgrad" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "adtgrad(40, 40, 10000)", adtgrad(40, 40, 10000) - print format2, "3.255976E-4" + print("Checking adtgrad") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "adtgrad(40, 40, 10000)", adtgrad(40, 40, 10000)) + print(format2, "3.255976E-4") - print + print() # Check value from UNESCO 1983, p. 44 - print "Checking temppot" - print - print "S = 40, T = 40 °C, P = 10000 dbar, Pref = 0" - print format1 % "temppot(40, 40, 10000)", temppot(40, 40, 10000) - print format2, 36.89073 + print("Checking temppot") + print() + print("S = 40, T = 40 C, P = 10000 dbar, Pref = 0") + print(format1 % "temppot(40, 40, 10000)", temppot(40, 40, 10000)) + print(format2, 36.89073) - print + print() # Check value from UNESCO 1983, p. 30 - print "Checking freezept" - print - print "S = 40, p = 500 dbar" - print format1 % "freezept(40, 500)", freezept(40, 500) - print format2, -2.588567 + print("Checking freezept") + print() + print("S = 40, p = 500 dbar") + print(format1 % "freezept(40, 500)", freezept(40, 500)) + print(format2, -2.588567) - print + print() # Check value from UNESCO 1983, p. 49 - print "Checking soundvel" - print - print "S = 40, T = 40 °C, P = 10000 dbar" - print format1 % "soundvel(40, 40, 10000)", soundvel(40, 40, 10000) - print format2, 1731.995 + print("Checking soundvel") + print() + print("S = 40, T = 40 C, P = 10000 dbar") + print(format1 % "soundvel(40, 40, 10000)", soundvel(40, 40, 10000)) + print(format2, 1731.995) - print + print() # Check value from UNESCO 1983, p. 
28 - print "Checking depth" - print - print "P = 10000 dbar, latitude = 30 degrees" - print format1 % "depth(10000, 30)", depth(10000, 30) - print format2, 9712.653 + print("Checking depth") + print() + print("P = 10000 dbar, latitude = 30 degrees") + print(format1 % "depth(10000, 30)", depth(10000, 30)) + print(format2, 9712.653) # ---------------------------------------------------------------- - + diff --git a/pyroms_toolbox/pyroms_toolbox/setup.py b/pyroms_toolbox/pyroms_toolbox/setup.py index c4a7256..f4f42be 100644 --- a/pyroms_toolbox/pyroms_toolbox/setup.py +++ b/pyroms_toolbox/pyroms_toolbox/setup.py @@ -6,6 +6,7 @@ def configuration(parent_package='',top_path=None): config.add_subpackage('BGrid_GFDL') config.add_subpackage('BGrid_POP') config.add_subpackage('BGrid_SODA') + config.add_subpackage('CGrid_GLORYS') config.add_subpackage('seawater') config.add_subpackage('Grid_HYCOM') config.add_library('_average', sources=['src/average.f90']), diff --git a/pyroms_toolbox/pyroms_toolbox/smooth_1D.py b/pyroms_toolbox/pyroms_toolbox/smooth_1D.py index c0d208c..5f4d67b 100644 --- a/pyroms_toolbox/pyroms_toolbox/smooth_1D.py +++ b/pyroms_toolbox/pyroms_toolbox/smooth_1D.py @@ -34,16 +34,16 @@ def smooth_1D(x, window_len=10, window='hanning'): """ if x.ndim != 1: - raise ValueError, "smooth only accepts 1 dimension arrays." + raise ValueError("smooth only accepts 1 dimension arrays.") if x.size < window_len: - raise ValueError, "Input vector needs to be bigger than window size." + raise ValueError("Input vector needs to be bigger than window size.") if window_len < 3: return x if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']: - raise ValueError, "Window is on of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'" + raise ValueError("Window is on of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'") s=np.r_[2*x[0]-x[window_len:1:-1], x, 2*x[-1]-x[-1:-window_len:-1]] #print(len(s)) diff --git a/pyroms_toolbox/pyroms_toolbox/sview.py b/pyroms_toolbox/pyroms_toolbox/sview.py index 1395d75..9c06ee9 100644 --- a/pyroms_toolbox/pyroms_toolbox/sview.py +++ b/pyroms_toolbox/pyroms_toolbox/sview.py @@ -146,7 +146,7 @@ def sview(var, tindex, sindex, grid, filename=None, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='h', area_thresh=5.) #map = pyroms.utility.get_grid_proj(grd, type=proj) - x, y = map(lon,lat) + x, y = list(map(lon,lat)) if fill_land is True and proj is not None: # fill land and draw coastlines @@ -174,9 +174,9 @@ def sview(var, tindex, sindex, grid, filename=None, \ cf = plt.pcolor(lon, lat, sslice, cmap = pal, norm = pal_norm) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: @@ -208,11 +208,11 @@ def sview(var, tindex, sindex, grid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. 
Please use .png, .svg or .eps file extension.') if proj is None: diff --git a/pyroms_toolbox/pyroms_toolbox/transectview.py b/pyroms_toolbox/pyroms_toolbox/transectview.py index edf85f2..49b5dc0 100644 --- a/pyroms_toolbox/pyroms_toolbox/transectview.py +++ b/pyroms_toolbox/pyroms_toolbox/transectview.py @@ -11,7 +11,7 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ fts=None, title=None, map=False, \ pal=None, clb=True, xaxis='lon', outfile=None): """ - transectview(var, tindex, istart, iend, jstart, jend, gridid, + transectview(var, tindex, istart, iend, jstart, jend, gridid, {optional switch}) optional switch: @@ -22,7 +22,7 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour - - c desired contour level. If not specified, + - c desired contour level. If not specified, plot every 4 contour level. - jrange j range - hrange h range @@ -134,21 +134,21 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ #pal.set_bad('w', 1.0) pal_norm = colors.BoundaryNorm(vc,ncolors=256, clip = False) - + # clear figure #plt.clf() if map is True: - # set axes for the main plot in order to keep space for the map - if fts < 12: - ax=None - else: - ax = plt.axes([0.15, 0.08, 0.8, 0.65]) - else: - if fts < 12: - ax=None - else: - ax=plt.axes([0.15, 0.1, 0.8, 0.8]) + # set axes for the main plot in order to keep space for the map + if fts < 12: + ax=None + else: + ax = plt.axes([0.15, 0.08, 0.8, 0.65]) + else: + if fts < 12: + ax=None + else: + ax=plt.axes([0.15, 0.1, 0.8, 0.8]) if fill is True: @@ -157,20 +157,20 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ cf = plt.pcolor(xt, zt, transect, cmap = pal, norm = pal_norm, axes=ax) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if c is None: - c = vc[::10] + c = vc[::10] if fill is True: plt.contour(xt, zt, transect, c, colors='k', linewidths=0.5, linestyles='solid', axes=ax) else: xc = 0.5*(xt[1:,:]+xt[:-1,:]) xc = 0.5*(xc[:,1:]+xc[:,:-1]) zc = 0.5*(zt[1:,:]+zt[:-1,:]) - zc = 0.5*(zc[:,1:]+zc[:,:-1]) + zc = 0.5*(zc[:,1:]+zc[:,:-1]) plt.contour(xc, zc, transect, c, colors='k', linewidths=0.5, linestyles='solid', axes=ax) if jrange is not None: @@ -196,18 +196,18 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ if map is True: # draw a map with constant-i slice location ax_map = plt.axes([0.4, 0.76, 0.2, 0.23]) - varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) + varm = np.ma.masked_where(mask[:,:] == 0, var[var.shape[0]-1,:,:]) lon_min = lon.min() lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. map = Basemap(projection='merc', llcrnrlon=lon_min, llcrnrlat=lat_min, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='i', area_thresh=10.) 
- x, y = map(lon,lat) - xt, yt = map(lont[0,:],latt[0,:]) + x, y = list(map(lon,lat)) + xt, yt = list(map(lont[0,:],latt[0,:])) # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') @@ -218,10 +218,10 @@ def transectview(var, tindex, istart, iend, jstart, jend, gridid, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') return diff --git a/pyroms_toolbox/pyroms_toolbox/twoDview.py b/pyroms_toolbox/pyroms_toolbox/twoDview.py index 575f63e..6bc6039 100644 --- a/pyroms_toolbox/pyroms_toolbox/twoDview.py +++ b/pyroms_toolbox/pyroms_toolbox/twoDview.py @@ -16,12 +16,12 @@ def twoDview(var, tindex, grid, filename=None, \ optional switch: - filename if defined, load the variable from file - - cmin set color minimum limit - - cmax set color maximum limit - - clev set the number of color step + - cmin set color minimum limit + - cmax set color maximum limit + - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - - d contour density (default d=4) + - d contour density (default d=4) - range set axis limit - fts set font size (default: 12) - title add title to the plot @@ -35,8 +35,8 @@ def twoDview(var, tindex, grid, filename=None, \ var must be a string and the variable will be load from the file. grid can be a grid object or a gridid. In the later case, the grid object correponding to the provided gridid will be loaded. - If proj is not None, return a Basemap object to be used with quiver - for example. + If proj is not None, return a Basemap object to be used with quiver + for example. """ # get grid @@ -153,7 +153,7 @@ def twoDview(var, tindex, grid, filename=None, \ lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. else: lon_min = range[0] @@ -171,49 +171,49 @@ def twoDview(var, tindex, grid, filename=None, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='h', area_thresh=5.) 
#map = pyroms.utility.get_grid_proj(grd, type=proj) - x, y = map(lon,lat) - + x, y = list(map(lon,lat)) + if fill_land is True and proj is not None: # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') else: - if proj is not None: + if proj is not None: Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray, edgecolors='face') pyroms_toolbox.plot_coast_line(grd, map) - else: + else: plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray, edgecolors='face') pyroms_toolbox.plot_coast_line(grd) - + if fill is True: - if proj is not None: + if proj is not None: cf = Basemap.contourf(map, x, y, var, vc, cmap = pal, \ norm = pal_norm) - else: + else: cf = plt.contourf(lon, lat, var, vc, cmap = pal, \ norm = pal_norm) else: - if proj is not None: + if proj is not None: cf = Basemap.pcolor(map, x, y, var, cmap = pal, norm = pal_norm, edgecolors='face') - else: + else: cf = plt.pcolor(lon, lat, var, cmap = pal, norm = pal_norm, edgecolors='face') if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format='%.2f') - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format='%.2f') + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True to overlay contour.' + raise Warning('Please run again with fill=True to overlay contour.') else: if proj is not None: Basemap.contour(map, x, y, var, vc[::d], colors='k', linewidths=0.5, linestyles='solid') - else: + else: plt.contour(lon, lat, var, vc[::d], colors='k', linewidths=0.5, linestyles='solid') if proj is None and range is not None: - plt.axis(range) + plt.axis(range) if title is not None: @@ -228,11 +228,11 @@ def twoDview(var, tindex, grid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') if proj is None: diff --git a/pyroms_toolbox/pyroms_toolbox/zview.py b/pyroms_toolbox/pyroms_toolbox/zview.py index f331365..de0809a 100644 --- a/pyroms_toolbox/pyroms_toolbox/zview.py +++ b/pyroms_toolbox/pyroms_toolbox/zview.py @@ -16,9 +16,9 @@ def zview(var, tindex, depth, grid, filename=None, \ optional switch: - filename if defined, load the variable from file - - cmin set color minimum limit - - cmax set color maximum limit - - clev set the number of color step + - cmin set color minimum limit + - cmax set color maximum limit + - clev set the number of color step - fill use contourf instead of pcolor - contour overlay contour (request fill=True) - range set axis limit @@ -34,8 +34,8 @@ def zview(var, tindex, depth, grid, filename=None, \ var must be a string and the variable will be load from the file. grid can be a grid object or a gridid. In the later case, the grid object correponding to the provided gridid will be loaded. - If proj is not None, return a Basemap object to be used with quiver - for example. + If proj is not None, return a Basemap object to be used with quiver + for example. """ # get grid @@ -128,7 +128,7 @@ def zview(var, tindex, depth, grid, filename=None, \ lon_max = lon.max() lon_0 = (lon_min + lon_max) / 2. 
lat_min = lat.min() - lat_max = lat.max() + lat_max = lat.max() lat_0 = (lat_min + lat_max) / 2. else: lon_min = range[0] @@ -146,49 +146,49 @@ def zview(var, tindex, depth, grid, filename=None, \ urcrnrlon=lon_max, urcrnrlat=lat_max, lat_0=lat_0, lon_0=lon_0, \ resolution='h', area_thresh=5.) #map = pyroms.utility.get_grid_proj(grd, type=proj) - x, y = map(lon,lat) - + x, y = list(map(lon,lat)) + if fill_land is True and proj is not None: # fill land and draw coastlines map.drawcoastlines() map.fillcontinents(color='grey') else: - if proj is not None: + if proj is not None: Basemap.pcolor(map, x, y, mask, vmin=-2, cmap=cm.gray) pyroms_toolbox.plot_coast_line(grd, map) - else: + else: plt.pcolor(lon, lat, mask, vmin=-2, cmap=cm.gray) pyroms_toolbox.plot_coast_line(grd) - + if fill is True: - if proj is not None: + if proj is not None: cf = Basemap.contourf(map, x, y, zslice, vc, cmap = pal, \ norm = pal_norm) - else: + else: cf = plt.contourf(lon, lat, zslice, vc, cmap = pal, \ norm = pal_norm) else: - if proj is not None: + if proj is not None: cf = Basemap.pcolor(map, x, y, zslice, cmap = pal, norm = pal_norm) - else: + else: cf = plt.pcolor(lon, lat, zslice, cmap = pal, norm = pal_norm) if clb is True: - clb = plt.colorbar(cf, fraction=0.075,format=clb_format) - for t in clb.ax.get_yticklabels(): - t.set_fontsize(fts) + clb = plt.colorbar(cf, fraction=0.075,format=clb_format) + for t in clb.ax.get_yticklabels(): + t.set_fontsize(fts) if contour is True: if fill is not True: - raise Warning, 'Please run again with fill=True to overlay contour.' + raise Warning('Please run again with fill=True to overlay contour.') else: if proj is not None: Basemap.contour(map, x, y, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') - else: + else: plt.contour(lon, lat, zslice, vc[::d], colors='k', linewidths=0.5, linestyles='solid') if proj is None and range is not None: - plt.axis(range) + plt.axis(range) if title is not None: @@ -203,11 +203,11 @@ def zview(var, tindex, depth, grid, filename=None, \ if outfile is not None: if outfile.find('.png') != -1 or outfile.find('.svg') != -1 or \ outfile.find('.eps') != -1: - print 'Write figure to file', outfile + print('Write figure to file', outfile) plt.savefig(outfile, dpi=200, facecolor='w', edgecolor='w', \ orientation='portrait') else: - print 'Unrecognized file extension. Please use .png, .svg or .eps file extension.' + print('Unrecognized file extension. Please use .png, .svg or .eps file extension.') if proj is None: diff --git a/pyroms_toolbox/setup.py b/pyroms_toolbox/setup.py index b8e42d0..77df91b 100644 --- a/pyroms_toolbox/setup.py +++ b/pyroms_toolbox/setup.py @@ -4,13 +4,28 @@ pyroms_toolbox is a suite of tools for working with ROMS. Requires: - pyroms (https://github.com/kshedstrom/pyroms) + pyroms (https://github.com/ESMG/pyroms) Contains: many things... 
""" +from numpy.distutils.core import Extension + +average = Extension(name = '_average', + sources = ['pyroms_toolbox/src/average.f90']) + +creep = Extension(name = 'creep', + sources = ['pyroms_toolbox/src/creeping_sea.f90']) + +move_river = Extension(name = '_move_river_t', + sources = ['pyroms_toolbox/src/move_river_t.f90']) + +move_runoff = Extension(name = '_move_runoff', + sources = ['pyroms_toolbox/src/move_runoff.f90']) + + doclines = __doc__.split("\n") def configuration(parent_package='',top_path=None): @@ -29,10 +44,10 @@ def configuration(parent_package='',top_path=None): version = '0.1', description = doclines[0], long_description = "\n".join(doclines[2:]), - author = "Pyroms Group", - author_email = "kshedstrom@alaska.edu", - url = 'https://github.com/kshedstrom/pyroms', + author = "ESMG", + url = 'https://github.com/ESMG/pyroms', license = 'BSD', platforms = ["any"], + ext_modules=[average, creep, move_river, move_runoff], configuration=configuration, )