diff --git a/Dockerfile b/Dockerfile index e132873f..314182d9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,43 +1,86 @@ -# Use a multi-stage build to create a platform-independent image -FROM --platform=$BUILDPLATFORM continuumio/miniconda3:latest AS build +# Use a lightweight Python image +FROM python:3.10-slim -# Set build arguments -ARG TARGETPLATFORM -ARG BUILDPLATFORM +# Set environment variables to avoid Python buffering +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 -# Install necessary build dependencies -RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - && rm -rf /var/lib/apt/lists/* - -# Create and activate the conda environment -RUN conda create --name myproject python=3.8 -y -SHELL ["conda", "run", "-n", "myproject", "/bin/bash", "-c"] +# Set the working directory in the container +WORKDIR /ctoaster.carrotcake -# Install netcdf-fortran using apt-get -RUN apt-get update && apt-get install -y --no-install-recommends \ +# Install required system packages and dependencies for netCDF installation +RUN apt-get update && apt-get install -y \ + git \ + net-tools \ + wget \ libnetcdf-dev \ libnetcdff-dev \ - && rm -rf /var/lib/apt/lists/* + gfortran \ + build-essential \ + m4 \ + libxml2-dev \ + libcurl4-openssl-dev \ + libhdf5-dev \ + zlib1g-dev \ + unzip \ + && apt-get clean && rm -rf /var/lib/apt/lists/* + +# Download and install netCDF C libraries +RUN wget https://downloads.unidata.ucar.edu/netcdf-c/4.9.2/netcdf-c-4.9.2.tar.gz \ + && tar -xvzf netcdf-c-4.9.2.tar.gz \ + && cd netcdf-c-4.9.2 \ + && export LDFLAGS="-L/usr/lib/x86_64-linux-gnu/hdf5/serial/lib" \ + && export CFLAGS="-I/usr/lib/x86_64-linux-gnu/hdf5/serial/include" \ + && ./configure \ + && make -j$(nproc) \ + && make check \ + && make install \ + && cd .. && rm -rf netcdf-c-4.9.2 netcdf-c-4.9.2.tar.gz + +# Download and install netCDF Fortran libraries +RUN wget https://downloads.unidata.ucar.edu/netcdf-fortran/4.6.1/netcdf-fortran-4.6.1.tar.gz \ + && tar -xvzf netcdf-fortran-4.6.1.tar.gz \ + && cd netcdf-fortran-4.6.1 \ + && ./configure \ + && make -j$(nproc) \ + && make check \ + && make install \ + && cd .. && rm -rf netcdf-fortran-4.6.1 netcdf-fortran-4.6.1.tar.gz + +# Run ldconfig to update library links +RUN ldconfig + +# Copy the entire project +COPY . 
/ctoaster.carrotcake + +# Copy the MODELS folder +COPY MODELS /ctoaster.carrotcake-jobs/MODELS + +# Install Python dependencies +RUN pip install --no-cache-dir -r requirements.txt -# Install scons and matplotlib using pip -RUN pip install --no-cache-dir scons matplotlib +# Create required directories +RUN mkdir -p /ctoaster.carrotcake-data \ + && mkdir -p /ctoaster.carrotcake-test \ + && mkdir -p /ctoaster.carrotcake-jobs -# Clone the ctoaster.cupcake repository -RUN git clone https://github.com/derpycode/ctoaster.cupcake.git -WORKDIR /ctoaster.cupcake +# Clone the required repositories +RUN git clone https://github.com/genie-model/ctoaster-data /ctoaster.carrotcake-data \ + && git clone https://github.com/genie-model/ctoaster-test /ctoaster.carrotcake-test -# Create the final image -FROM continuumio/miniconda3:latest +# Create the hidden .ctoasterrc file with the configuration +RUN echo "ctoaster_root: /ctoaster.carrotcake\nctoaster_data: /ctoaster.carrotcake-data\nctoaster_test: /ctoaster.carrotcake-test\nctoaster_jobs: /ctoaster.carrotcake-jobs\nctoaster_version: DEVELOPMENT" > /root/.ctoasterrc -# Copy the conda environment from the build stage -COPY --from=build /opt/conda/envs/myproject /opt/conda/envs/myproject +# Make necessary scripts executable +RUN chmod +x /ctoaster.carrotcake/setup-ctoaster /ctoaster.carrotcake/run-carrotcake /ctoaster.carrotcake/tests -# Activate the conda environment -SHELL ["conda", "run", "-n", "myproject", "/bin/bash", "-c"] +# Set up SCons and build the project +RUN pip install scons +# \ + # && scons -C /ctoaster.carrotcake-jobs/MODELS/DEVELOPMENT/LINUX/ship -# Copy the project files -COPY --from=build /ctoaster.cupcake /ctoaster.cupcake -WORKDIR /ctoaster.cupcake +# Expose the port for the FastAPI server +EXPOSE 8000 -# Set any necessary environment variables +# Command to run the FastAPI server +CMD ["uvicorn", "tools.REST:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/Dockerfile_backup b/Dockerfile_backup new file mode 100644 index 00000000..1eac8210 --- /dev/null +++ b/Dockerfile_backup @@ -0,0 +1,43 @@ +# Use a multi-stage build to create a platform-independent image +FROM --platform=$BUILDPLATFORM continuumio/miniconda3:latest AS build + +# Set build arguments +ARG TARGETPLATFORM +ARG BUILDPLATFORM + +# Install necessary build dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Create and activate the conda environment +RUN conda create --name myproject python=3.8 -y +SHELL ["conda", "run", "-n", "myproject", "/bin/bash", "-c"] + +# Install netcdf-fortran using apt-get +RUN apt-get update && apt-get install -y --no-install-recommends \ + libnetcdf-dev \ + libnetcdff-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install scons and matplotlib using pip +RUN pip install --no-cache-dir scons matplotlib + +# Clone the ctoaster.carrotcake repository +RUN git clone https://github.com/derpycode/ctoaster.carrotcake.git +WORKDIR /ctoaster.carrotcake + +# Create the final image +FROM continuumio/miniconda3:latest + +# Copy the conda environment from the build stage +COPY --from=build /opt/conda/envs/myproject /opt/conda/envs/myproject + +# Activate the conda environment +SHELL ["conda", "run", "-n", "myproject", "/bin/bash", "-c"] + +# Copy the project files +COPY --from=build /ctoaster.carrotcake /ctoaster.carrotcake +WORKDIR /ctoaster.carrotcake + +# Set any necessary environment variables diff --git a/README.md b/README.md index 8991fea1..3ce02ae8 100644 --- 
a/README.md
+++ b/README.md
@@ -1,3 +1,387 @@
-# cupcake
-(cGENIE varient)
-(also known as cTOASTER -- the carbon Turnover in Ocean, Atmosphere, Sediment, and Terrestrial Exchangeable Reservoirs model)
+# Carrotcake
+
+**Carrotcake** is a variant of **cGENIE** (also known as **cTOASTER** -- the **Carbon Turnover in Ocean, Atmosphere, Sediment, and Terrestrial Exchangeable Reservoirs** model). It is designed to simulate and analyze carbon turnover across various Earth system reservoirs.
+
+---
+
+## Setup Instructions
+
+### Prerequisites
+
+1. **Install Docker**
+   Follow the official Docker installation guide for your platform:
+   [Docker Installation Guide](https://docs.docker.com/engine/install/)
+
+2. **Install Google Cloud SDK**
+   Install the Google Cloud SDK for your operating system:
+   [Google Cloud SDK Installation Guide](https://cloud.google.com/sdk/docs/install)
+
+---
+
+## Docker Commands
+
+### 1. Building the Docker Image
+
+To build the Docker image, run the following command:
+
+```
+docker build -t ctoaster-backend:1.0 .
+```
+
+### 2. Running the Docker Image
+
+To run the Docker container in detached mode, use:
+
+```
+docker run -d --name ctoaster-backend-container -p 8000:8000 ctoaster-backend:1.0
+```
+
+### 3. Checking Container Logs
+
+To monitor the logs of the running container:
+
+List all running containers:
+
+```
+docker container ls
+# or
+docker ps
+```
+
+Check the logs using the container ID:
+
+```
+docker logs <container-id>
+```
+
+### 4. Pushing the Docker Image to Google Container Registry
+
+To upload the Docker image to Google Container Registry:
+
+Authenticate with Google Cloud:
+
+```
+gcloud auth login
+```
+```
+gcloud auth configure-docker
+```
+
+Tag the Docker image:
+
+```
+docker tag ctoaster-backend:1.0 us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+```
+
+Push the Docker image:
+
+```
+docker push us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+```
+
+### 5. Pulling the Docker Image
+
+To pull the Docker image from Google Container Registry:
+
+```
+docker pull us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+```
+
+### 6. Listing Existing Docker Images
+
+To list all Docker images in the cupcake folder of your Google Container Registry:
+
+```
+gcloud artifacts docker images list us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend
+```
+
+### 7. Deleting a Docker Image
+
+To delete a Docker image from Google Container Registry:
+
+Authenticate with Google Cloud:
+
+```
+gcloud auth login
+```
+
+```
+gcloud auth configure-docker
+```
+
+List repositories:
+
+```
+gcloud artifacts repositories list
+```
+
+List Docker images:
+
+```
+gcloud artifacts docker images list us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake
+```
+
+Delete the Docker image:
+
+```
+gcloud artifacts docker images delete us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0 --quiet
+```
+
+# 🔥 CTOASTER Backend - Kubernetes Deployment Guide
+
+This guide outlines how to deploy the CTOASTER Backend application on a Google Kubernetes Engine (GKE) cluster.
+
+---
+
+## 🚀 **1. Prerequisites**
+
+Ensure you have the following installed:
+
+- [Google Cloud SDK](https://cloud.google.com/sdk/docs/install)
+- [kubectl](https://kubernetes.io/docs/tasks/tools/) for Kubernetes
+- [GKE Auth Plugin](https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl#install_plugin)
+
+To verify installations:
+```bash
+gcloud version
+kubectl version --client
+```
+
+---
+
+## ☁️ **2. Deploy CTOASTER Backend to GKE**
+
+### **2.1 Enable GKE and Create Cluster**
+Ensure GKE API is enabled:
+```bash
+gcloud services enable container.googleapis.com
+```
+
+Create GKE cluster (**only if a new cluster is needed**):
+```bash
+gcloud container clusters create ctoaster-cluster \
+    --region us-west2 \
+    --num-nodes=2 \
+    --enable-autoupgrade
+```
+
+Connect `kubectl` to the cluster:
+```bash
+gcloud container clusters get-credentials ctoaster-cluster --region us-west2
+```
+
+---
+
+### **2.2 Deploy Backend App to GKE**
+
+1. **Push Docker image to Google Artifact Registry (GAR):**
+```bash
+gcloud auth configure-docker us-west2-docker.pkg.dev
+
+# Tag and push the image
+docker tag ctoaster-backend:1.0 us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+docker push us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+```
+
+2. **Create Kubernetes manifests** (**needs to be done only once per cluster**):
+- `deployment.yaml`:
+```yaml
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: ctoaster-backend-deployment
+  labels:
+    app: ctoaster-backend
+spec:
+  replicas: 2
+  selector:
+    matchLabels:
+      app: ctoaster-backend
+  template:
+    metadata:
+      labels:
+        app: ctoaster-backend
+    spec:
+      containers:
+      - name: ctoaster-backend
+        image: us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+        ports:
+        - containerPort: 8000
+        env:
+        - name: DATABASE_URL
+          value: "your-database-url"
+        - name: OTHER_ENV_VAR
+          value: "some-value"
+        imagePullPolicy: Always
+```
+
+- `service.yaml`:
+```yaml
+apiVersion: v1
+kind: Service
+metadata:
+  name: ctoaster-backend-service
+spec:
+  type: LoadBalancer
+  ports:
+  - port: 80
+    targetPort: 8000
+  selector:
+    app: ctoaster-backend
+```
+
+---
+
+3. **Deploy to GKE:**
+```bash
+kubectl apply -f deployment.yaml
+kubectl apply -f service.yaml
+```
+
+4. **Check status:**
+```bash
+kubectl get pods
+kubectl get svc
+```
+
+Expected output:
+```
+NAME                                 READY   STATUS    RESTARTS   AGE
+ctoaster-backend-deployment-xyz123   1/1     Running   0          10s
+```
+
+To get the external IP of the backend service:
+```bash
+kubectl get svc ctoaster-backend-service
+```
+Expected output:
+```
+NAME                       TYPE           CLUSTER-IP       EXTERNAL-IP    PORT(S)        AGE
+ctoaster-backend-service   LoadBalancer   34.118.234.130   35.235.72.29   80:31234/TCP   5m
+```
+
+You can now access the API at:
+```
+http://35.235.72.29
+```
+
+---
+
+# ⚖️ 4 CTOASTER Backend - Horizontal Pod Autoscaler (HPA) Guide
+
+This guide explains how to automatically scale the number of pods for the `ctoaster-backend` Kubernetes deployment based on CPU utilization.
+
+---
+
+## 📌 Prerequisites
+
+1. Your backend deployment **must define resource requests and limits**:
+
+```yaml
+resources:
+  requests:
+    cpu: "250m"
+    memory: "256Mi"
+  limits:
+    cpu: "500m"
+    memory: "512Mi"
+```
+
+2. The **Kubernetes metrics server** must be installed. If not already installed, run:
+
+```bash
+kubectl apply -f https://github.com/kubernetes-sigs/metrics-server/releases/latest/download/components.yaml
+```
+
+---
+
+## 📄 Create HPA Manifest
+
+Create a file named `hpa-backend.yaml` with the following contents:
+
+```yaml
+apiVersion: autoscaling/v2
+kind: HorizontalPodAutoscaler
+metadata:
+  name: ctoaster-backend-hpa
+spec:
+  scaleTargetRef:
+    apiVersion: apps/v1
+    kind: Deployment
+    name: ctoaster-backend
+  minReplicas: 2
+  maxReplicas: 5
+  metrics:
+  - type: Resource
+    resource:
+      name: cpu
+      target:
+        type: Utilization
+        averageUtilization: 60
+```
+
+---
+
+## 🚀 Apply the HPA
+
+Apply the updated deployment (if you added `resources:`) and then the HPA manifest:
+
+```bash
+kubectl apply -f deployment.yaml
+kubectl apply -f hpa-backend.yaml
+```
+
+---
+
+## 🔍 Monitor the HPA
+
+You can verify that the HPA is active and functioning:
+
+```bash
+kubectl get hpa
+kubectl top pods
+```
+
+Example output:
+```
+NAME                   REFERENCE                     TARGETS   MINPODS   MAXPODS   REPLICAS   AGE
+ctoaster-backend-hpa   Deployment/ctoaster-backend   45%/60%   2         5         2          1m
+```
+
+---
+
+## 🔍 **5. Monitoring & Troubleshooting**
+
+1. **Check pod status:**
+```bash
+kubectl get pods
+```
+
+2. **View pod logs:**
+```bash
+kubectl logs -f <pod-name>
+```
+
+3. **Check deployment:**
+```bash
+kubectl describe deployment ctoaster-backend-deployment
+```
+
+4. **Check service and external IP:**
+```bash
+kubectl get svc
+```
+
+---
+
+## 🧹 **6. Cleanup (Optional)**
+To delete the GKE cluster and avoid billing:
+```bash
+# Delete cluster
+gcloud container clusters delete ctoaster-cluster --region us-west2
+
+# Delete Docker image from GAR
+gcloud artifacts docker images delete us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0
+```
+
+---
+
+## 📞 **7. Support**
+For any issues or further assistance:
+- Check GKE logs: `kubectl describe pod <pod-name>`
+- Visit [Google Kubernetes Engine Docs](https://cloud.google.com/kubernetes-engine/docs)
+- Contact the project admin or Slack channel.
+
+---
+
+🎉 **That's it! Your CTOASTER Backend is now running on Kubernetes with a LoadBalancer service!** 🚀🔥
\ No newline at end of file
diff --git a/SConstruct b/SConstruct
index ba2c3153..c77b8d27 100644
--- a/SConstruct
+++ b/SConstruct
@@ -1,5 +1,5 @@
 from __future__ import print_function
-
+import glob
 import os
 import platform as P
 import sys
@@ -71,4 +71,4 @@ if "ld_library_path" in f90:
 # Build configuration.
Export("env", "subdirs", "build_type") SConscript(os.path.join(srcdir, "SConscript"), variant_dir="#build", duplicate=0) -Install(".", "build/cupcake.exe") +Install(".", "build/carrotcake.exe") diff --git a/deployment.yaml b/deployment.yaml new file mode 100644 index 00000000..8ea1d2e7 --- /dev/null +++ b/deployment.yaml @@ -0,0 +1,30 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ctoaster-backend +spec: + replicas: 2 + selector: + matchLabels: + app: ctoaster-backend + template: + metadata: + labels: + app: ctoaster-backend + spec: + containers: + - name: ctoaster-backend + image: us-west2-docker.pkg.dev/ucr-ursa-major-ridgwell-lab/cupcake/ctoaster-backend:1.0 + imagePullPolicy: Always + ports: + - containerPort: 8000 + env: + - name: ENVIRONMENT + value: "production" + resources: + requests: + cpu: "250m" + memory: "256Mi" + limits: + cpu: "500m" + memory: "512Mi" diff --git a/doc/cgenie-coverage-testing.tex b/doc/cgenie-coverage-testing.tex index 031b3e04..009e3e46 100644 --- a/doc/cgenie-coverage-testing.tex +++ b/doc/cgenie-coverage-testing.tex @@ -29,7 +29,7 @@ \newfloat{listing}{tbp}{lop} \floatname{listing}{Listing} -\title{Coverage testing for GENIE \texttt{cupcake}} +\title{Coverage testing for GENIE \texttt{carrotcake}} \author{Ian~Ross} \date{4 March 2015} diff --git a/doc/cgenie-cupcake-mac-doc.tex b/doc/cgenie-cupcake-mac-doc.tex index 30dc120d..74fb430e 100644 --- a/doc/cgenie-cupcake-mac-doc.tex +++ b/doc/cgenie-cupcake-mac-doc.tex @@ -1,5 +1,5 @@ % -% cgenie-cupcake-mac-doc.tex +% cgenie-carrotcake-mac-doc.tex % % Developed by Gregory J. L. Tourte % Copyright (c) 2015 School of Geographical Sciences - The University of Bristol @@ -89,7 +89,7 @@ breaksymbolleft = \squelch{\tiny\ensuremath{\hookrightarrow}}, } -\title{cGENIE (Cupcake v0.3)\\ +\title{cGENIE (carrotcake v0.3)\\ Mac OS X Instructions} \author{Gregory J. L. Tourte} @@ -296,7 +296,7 @@ \subsection{Python} \subsection{Fortran Compiler (\texttt{gfortran})} The source of the \texttt{gfortran} compiler has changed since the instructions -for Cupcake v0.2. The reasons for this change is that packages have been +for carrotcake v0.2. The reasons for this change is that packages have been updated on the third party repositories, both macport and homebrew, and none of them work properly. The homebrew package we were using previously has been removed in favour of a full \texttt{GCC} install (overriding the system one @@ -347,14 +347,14 @@ \subsection{NetCDF Libraries} $ sudo port install netcdf-fortran \end{minted} -\section{Installing and running cGENIE (cupcake)} +\section{Installing and running cGENIE (carrotcake)} With all the steps described in this document, you should be able to follow the instructions described in the -\href{run:./cupcake-config-build.pdf}{\texttt{cupcake-config-build.pdf}} file +\href{run:./carrotcake-config-build.pdf}{\texttt{carrotcake-config-build.pdf}} file in this folder to install and run the model. -If you have any issues with running cGENIE (cupcake) on Mac OS X, please report +If you have any issues with running cGENIE (carrotcake) on Mac OS X, please report them at \url{https://github.com/genie-model/cgenie/issues}. 
\section{Quirks} diff --git a/doc/cupcake-config-build.tex b/doc/cupcake-config-build.tex index 128025a6..26e2c1b6 100644 --- a/doc/cupcake-config-build.tex +++ b/doc/cupcake-config-build.tex @@ -29,7 +29,7 @@ \newfloat{listing}{tbp}{lop} \floatname{listing}{Listing} -\title{(cTOASTER) \texttt{cupcake} Configuration and Build System} +\title{(cTOASTER) \texttt{carrotcake} Configuration and Build System} \author{Ian Ross, with contributions from:\\Andy Ridgwell} \date{\today} @@ -44,7 +44,7 @@ \texttt{typewriter} font. Commands that extend over several lines are marked with lines ending \texttt{...} with the following line beginning \texttt{...}.}} describes the new configuration and build system -developed for the \texttt{cupcake} release of the \textbf{cTOASTER} model. +developed for the \texttt{carrotcake} release of the \textbf{cTOASTER} model. Documentation is divided into three sections, one for users of the model, one for those who wish to modify the model and one for those concerned with maintaining and extending the configuration and build @@ -58,8 +58,8 @@ \section{Quick start} Open a shell in your home directory and do this (this is based on the current repository setup, which will probably change a little soon): \begin{verbatim} - git clone https://github.com/derpycode/ctoaster.cupcake.git - cd ~/ctoaster.cupcake + git clone https://github.com/derpycode/ctoaster.carrotcake.git + cd ~/ctoaster.carrotcake ./setup-ctoaster \end{verbatim} (Accept the defaults for all the questions it asks -- just hit return at each prompt to accept the default.) @@ -72,7 +72,7 @@ \section{Quick start} \begin{verbatim} ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS/LAB_0.snowball snowball 10 - cd ~/ctoaster.cupcake-jobs/snowball + cd ~/ctoaster.carrotcake-jobs/snowball ./go run \end{verbatim} The job results appear in the \texttt{output} directory in the job @@ -80,13 +80,13 @@ \section{Quick start} There is also a facility to configure and run experiments in the same way as for \textbf{cgenie.muffin}: \begin{verbatim} - ./run-cupcake [] + ./run-carrotcake [] \end{verbatim} which for the example above, would be: \begin{verbatim} - ./run-cupcake cgenie.eb_go_gs_ac_bg.p0650e.NONE LABS LAB_0.snowball 10 + ./run-carrotcake cgenie.eb_go_gs_ac_bg.p0650e.NONE LABS LAB_0.snowball 10 \end{verbatim} -In this, \texttt{run-cupcake.py} first configures a new job using \texttt{./new-job}, +In this, \texttt{run-carrotcake.py} first configures a new job using \texttt{./new-job}, seamlessly changes directory, and then automatically runs it using \texttt{./go run} %---------------------------------------------------------------------- @@ -100,18 +100,18 @@ \section{Quick start} %---------------------------------------------------------------------- \section{Installation and setup} -To install \textbf{cTOASTER}, first choose a location for the installation. (On linux machines, it's probably best just to install \textbf{cTOASTER} in your home directory.) Then clone the main \texttt{ctoaster.cupcake} repository from GitHub +To install \textbf{cTOASTER}, first choose a location for the installation. (On linux machines, it's probably best just to install \textbf{cTOASTER} in your home directory.) 
Then clone the main \texttt{ctoaster.carrotcake} repository from GitHub using the command: \begin{verbatim} - git clone https://github.com/derpycode/ctoaster.cupcake.git + git clone https://github.com/derpycode/ctoaster.carrotcake.git \end{verbatim} -This will produce a new directory called \texttt{ctoaster.cupcake} containing the model source code and build scripts. +This will produce a new directory called \texttt{ctoaster.carrotcake} containing the model source code and build scripts. Before using the model, it's necessary to do a little bit of setup. -Go into the new \texttt{ctoaster.cupcake} directory and run the +Go into the new \texttt{ctoaster.carrotcake} directory and run the \texttt{setup-ctoaster} script: \begin{verbatim} - cd ~/ctoaster.cupcake + cd ~/ctoaster.carrotcake ./setup-ctoaster \end{verbatim} The script will ask where you want to put a number of things -- it's usually find to just take the defaults (just hit enter at each of the prompts). In all of what follows below, we'll assume that you chose the defaults. The @@ -119,16 +119,16 @@ \section{Installation and setup} \begin{itemize} \item{The \textbf{cTOASTER} root installation directory: unless you know what you're doing, - \\accept the \texttt{~/ctoaster.cupcake} default for this.} - \item{The \textbf{cTOASTER} data directory (default \texttt{~/ctoaster.cupcake-data}) + \\accept the \texttt{~/ctoaster.carrotcake} default for this.} + \item{The \textbf{cTOASTER} data directory (default \texttt{~/ctoaster.carrotcake-data}) where base and user model configurations are stored, along with forcing files.} - \item{The \textbf{cTOASTER} test directory (default \texttt{~/ctoaster.cupcake-test}) + \item{The \textbf{cTOASTER} test directory (default \texttt{~/ctoaster.carrotcake-test}) where \textbf{cTOASTER} jobs with known good outputs can be stored for use as tests -- it's possible to run sets of tests and compare their results with the known good values with a single command, which is useful for making sure that the model is working.} - \item{The \textbf{cTOASTER} jobs directory (default \texttt{~/ctoaster.cupcake-jobs}) + \item{The \textbf{cTOASTER} jobs directory (default \texttt{~/ctoaster.carrotcake-jobs}) where new \textbf{cTOASTER} jobs are set up -- the \texttt{new-job} script (see next section) sets jobs up here by default.} \item{The default model version to use for running jobs. @@ -140,7 +140,7 @@ \section{Installation and setup} Once the data and test repositories have been downloaded, \textbf{cTOASTER} is ready to use. The setup information is written to a \texttt{.ctoasterrc} file in your home directory. If you ever want to set the model up afresh, just remove this file and run the \texttt{setup-ctoaster} script again. -To check that the installation has been successful and that the model works on your machine, you can run some basic test jobs -- in the \texttt{~/ctoaster.cupcake} directory, just type: +To check that the installation has been successful and that the model works on your machine, you can run some basic test jobs -- in the \texttt{~/ctoaster.carrotcake} directory, just type: \begin{verbatim} ./tests run basic \end{verbatim} @@ -155,16 +155,16 @@ \section{Installation and setup} \section{Creating new jobs} New \textbf{cTOASTER} jobs are configured using the \texttt{new-job} script in -\texttt{~/ctoaster.cupcake}. This takes a number of arguments that describe the +\texttt{~/ctoaster.carrotcake}. 
This takes a number of arguments that describe the job to be set up and produces a job directory under -\\\texttt{~/ctoaster.cupcake-jobs} containing everything needed to build and run +\\\texttt{~/ctoaster.carrotcake-jobs} containing everything needed to build and run the model with the selected configuration. The \texttt{new-job} script should be run as: \begin{verbatim} new-job [options] job-name run-length \end{verbatim} where \texttt{job-name} is the name to be used for the job directory -to be created under \texttt{~/ctoaster.cupcake-jobs} and \texttt{run-length} is +to be created under \texttt{~/ctoaster.carrotcake-jobs} and \texttt{run-length} is the length of the model run in years. The possible options for \texttt{new-job} are as follows (in each case given in both short and long forms where these exist). First, there are three options that @@ -177,21 +177,21 @@ \section{Creating new jobs} -b BASE_CONFIG --base-config=BASE_CONFIG \end{verbatim} The base model configuration to use -- these are stored in the -\texttt{~/ctoaster.cupcake-data/base-configs} directory. +\texttt{~/ctoaster.carrotcake-data/base-configs} directory. \begin{verbatim} -u USER_CONFIG --user-config=USER_CONFIG \end{verbatim} The user model configuration to apply on top of the base configuration -- model user configurations are stored in the -\texttt{~/ctoaster.cupcake-data/user-configs} directory. +\texttt{~/ctoaster.carrotcake-data/user-configs} directory. \begin{verbatim} -c CONFIG --config=CONFIG \end{verbatim} Full configuration name (this is mostly used for conversions of -pre-\texttt{cupcake} tests) -- full configurations are stored in the -\texttt{~/ctoaster.cupcake-data/full-configs} directory. +pre-\texttt{carrotcake} tests) -- full configurations are stored in the +\texttt{~/ctoaster.carrotcake-data/full-configs} directory. In addition to the configuration file options, the following additional options may be supplied to \texttt{new-job}: @@ -215,11 +215,11 @@ \section{Creating new jobs} \begin{verbatim} --old-restart \end{verbatim} -It may sometimes be useful to restart from an old pre-\texttt{cupcake} +It may sometimes be useful to restart from an old pre-\texttt{carrotcake} job. This flag indicates that the job name supplied to the \texttt{-r} flag is the name of an old \textbf{cTOASTER} job whose output can be found in the -\\\texttt{~/ctoaster.cupcake\_output} directory. +\\\texttt{~/ctoaster.carrotcake\_output} directory. \begin{verbatim} --t100 @@ -232,7 +232,7 @@ \section{Creating new jobs} -j JOB_DIR --job-dir=JOB_DIR \end{verbatim} It can sometimes be useful to put cTOASTER jobs somewhere other than -\texttt{~/ctoaster.cupcake-jobs}. This flag allows an alternative job directory +\texttt{~/ctoaster.carrotcake-jobs}. This flag allows an alternative job directory to be specified. \begin{verbatim} @@ -240,7 +240,7 @@ \section{Creating new jobs} \end{verbatim} Normally, \texttt{new-job} will generate a job set up to use the default model version which was selected when the -\texttt{setup-ctoaster.cupcake} script was run. This flag allows for a +\texttt{setup-ctoaster.carrotcake} script was run. This flag allows for a different model version to be selected. \subsection*{Examples} @@ -251,7 +251,7 @@ \subsection*{Examples} \end{verbatim} This configures the first example job in the workshop handout. 
After running this invocation of \texttt{new-job}, a new -\texttt{~/ctoaster.cupcake-jobs/snowball} job directory will have been created +\texttt{~/ctoaster.carrotcake-jobs/snowball} job directory will have been created from which the job can be executed. \begin{verbatim} @@ -269,10 +269,10 @@ \section{Running jobs} be run from the newly created job directory using a ``\texttt{go}'' script. Configuring and running a job is as simple as: \begin{verbatim} - cd ~/ctoaster.cupcake + cd ~/ctoaster.carrotcake ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS/LAB_0.snowball snowball 10 - cd ~/ctoaster.cupcake-jobs/snowball + cd ~/ctoaster.carrotcake-jobs/snowball ./go run \end{verbatim} The \texttt{go} script has three main options and two advanced @@ -304,7 +304,7 @@ \section{Running jobs} %---------------------------------------------------------------------- \section{Managing configuration files} -Configuration files are all kept in \texttt{~/ctoaster.cupcake-data}, base +Configuration files are all kept in \texttt{~/ctoaster.carrotcake-data}, base configurations in the \texttt{base-configs} directory and user configurations in \texttt{user-configs}. All of this configuration data is held in a git repository on \textbf{GitHub}, so if you want to add user @@ -322,7 +322,7 @@ \section{Managing tests} people changing the \textbf{cTOASTER} model code, but it can still be useful to save jobs as tests. -The \texttt{tests} script in \texttt{~/ctoaster.cupcake} is used to manage and +The \texttt{tests} script in \texttt{~/ctoaster.carrotcake} is used to manage and run test jobs. To list the available tests, do \begin{verbatim} ./tests list @@ -335,7 +335,7 @@ \section{Managing tests} \texttt{basic/biogem}), a set of tests (e.g. \texttt{basic}) or \texttt{ALL}, which runs \emph{all} available tests. The tests are run as normal cTOASTER jobs in a sub-directory of \ -\\texttt{~/ctoaster.cupcake-jobs} +\\texttt{~/ctoaster.carrotcake-jobs} with a name of the form \texttt{test-YYYYMMDD-HHMMSS} based on the current date and time. As well as full test job output, build and run logs, a \texttt{test.log} file is produced in this test directory, @@ -347,7 +347,7 @@ \section{Managing tests} ./tests add \end{verbatim} where \texttt{} is the name of an existing job in -\texttt{~/ctoaster.cupcake-jobs}. Note that you need to run the job before you +\texttt{~/ctoaster.carrotcake-jobs}. Note that you need to run the job before you can add it as a test! The test script will ask you which output files you want to use for comparison for each model component -- there are sensible defaults in most cases, but you can select individual files @@ -398,19 +398,19 @@ \section{Managing model versions} Model versions are indicated by git 'tags'. In order to see a list of available model versions, use the following command in the -\texttt{~/ctoaster.cupcake} directory: +\texttt{~/ctoaster.carrotcake} directory: \begin{verbatim} git tag -l \end{verbatim} To configure a job to use a different model version from the default, simply add a \texttt{-v} flag to \texttt{new-job} specifying the model version you want to use. For example, to configure a job to use the -\texttt{cupcake-1.0} version of the model, use something like the +\texttt{carrotcake-1.0} version of the model, use something like the following command: \begin{verbatim} ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS/LAB_0.snowball snowball 10 ... - ... -v cupcake-1.0 + ... 
-v carrotcake-1.0 \end{verbatim} Within a job directory, you can see what model version the job was configured with by looking at the contents of the @@ -420,11 +420,11 @@ \section{Managing model versions} %---------------------------------------------------------------------- \section{Legacy (cGENIE) work-flow} -The example experiment above can also be configured and run via a single command\footnote{Usage: \texttt{run-cupcake []}} : +The example experiment above can also be configured and run via a single command\footnote{Usage: \texttt{run-carrotcake []}} : \begin{verbatim} - ./run-cupcake cgenie.eb_go_gs_ac_bg.p0650e.NONE LABS LAB_0.snowball 10 + ./run-carrotcake cgenie.eb_go_gs_ac_bg.p0650e.NONE LABS LAB_0.snowball 10 \end{verbatim} -This employs \texttt{run-cupcake.py}, which first configures a new job using \texttt{./new-job}, +This employs \texttt{run-carrotcake.py}, which first configures a new job using \texttt{./new-job}, automatically changes to the corresponding job directory, and then runs it using \texttt{./go run}. %---------------------------------------------------------------------- @@ -450,9 +450,9 @@ \section{Installation and setup for development} default model version to use, you should answer ``\texttt{DEVELOPMENT}''. This causes model executables for all jobs to be built by default from the source code currently in -\texttt{~/ctoaster.cupcake/src}, rather than from a specified past model +\texttt{~/ctoaster.carrotcake/src}, rather than from a specified past model version. In this way, you can make changes to the model source code -under \texttt{~/ctoaster.cupcake/src} and doing a \texttt{./go run} in a job +under \texttt{~/ctoaster.carrotcake/src} and doing a \texttt{./go run} in a job directory will trigger a build and execution of the model based on the changed code. (For a simpler way to check for successful model compilation, see below in Section~\ref{sec:build-system}.) @@ -460,9 +460,9 @@ \section{Installation and setup for development} %---------------------------------------------------------------------- \section{Model source organisation} -The model source lives in \texttt{~/ctoaster.cupcake/src}, with one subdirectory +The model source lives in \texttt{~/ctoaster.carrotcake/src}, with one subdirectory for each model component (\texttt{embm}, \texttt{biogem}, etc.), the -main \texttt{cupcake.f90} program, plus a couple of extra sub-directories +main \texttt{carrotcake.f90} program, plus a couple of extra sub-directories for utility routines. All of the code is FORTRAN 90 and all source files accordingly have a \texttt{.f90} extension. @@ -484,7 +484,7 @@ \section{Build system} The build system uses \textbf{SCons}, a 'Make replacement' written in \textbf{โ€ข}. The rest of the build system is also written in Python -and lives in \texttt{~/ctoaster.cupcake/scripts}. In the normal course of +and lives in \texttt{~/ctoaster.carrotcake/scripts}. In the normal course of things, it shouldn't be necessary for \textbf{cTOASTER} model developers to touch this stuff -- it should just work. (There are cases where this isn't quite true, mostly to do with major changes in the layout or naming of @@ -498,21 +498,21 @@ \section{Build system} account of the model version requested) and again, should just work. 
The scripts perform builds in directories under -\texttt{~/ctoaster.cupcake-jobs/MODELS} -- after running a few jobs, you'll see +\texttt{~/ctoaster.carrotcake-jobs/MODELS} -- after running a few jobs, you'll see one directory under there for each model version you've used. The directories all ultimately have the form: \begin{verbatim} - ~/ctoaster.cupcake-jobs/MODELS//// + ~/ctoaster.carrotcake-jobs/MODELS//// \end{verbatim} where the different components have the following meanings: \begin{description} \item[\texttt{}]{The model version for this build. If you run jobs with a non-\texttt{DEVELOPMENT} model version, you'll - also see a directory called \texttt{~/ctoaster.cupcake-jobs/MODELS/REPOS} + also see a directory called \texttt{~/ctoaster.carrotcake-jobs/MODELS/REPOS} holding repository copies at fixed versions -- when a build is required for a non-\texttt{DEVELOPMENT} model version, the source code is accessed from one of these \texttt{REPOS} directories - rather than from \texttt{~/ctoaster.cupcake/src}.} + rather than from \texttt{~/ctoaster.carrotcake/src}.} \item[\texttt{}]{A ``platform'' is basically a combination of a machine name or type and a compiler/NetCDF directory combination -- see Section~\ref{sec:platforms}. Having this level @@ -538,7 +538,7 @@ \section{Build system} for a job with preprocessor definitions \texttt{cTOASTERNX=36}, \texttt{cTOASTERNY=36}: \begin{verbatim} - /home/iross/ctoaster.cupcake-jobs/MODELS/DEVELOPMENT/seneca/... + /home/iross/ctoaster.carrotcake-jobs/MODELS/DEVELOPMENT/seneca/... ...50b3ce7f3162a0f783e4424c9a294de0061e0cdc/ship \end{verbatim} @@ -556,12 +556,12 @@ \section{Build system} running jobs, it's not very convenient for day-to-day development, where you often just want to check that the model compiles. In order to make this convenient, the SCons scripts are set up so that it's -possible to just run \texttt{scons} in the \texttt{~/ctoaster.cupcake} directory +possible to just run \texttt{scons} in the \texttt{~/ctoaster.carrotcake} directory and have \emph{a} version of the model built right there (actually -into a \texttt{~/ctoaster.cupcake/build} directory). What this means is that +into a \texttt{~/ctoaster.carrotcake/build} directory). What this means is that you can set up your editor compilation command (if you use \textbf{Emacs}, the thing that gets run when you type \texttt{C-c m}) to be -``\texttt{scons -C ~/ctoaster.cupcake}'' and all your usual compiler error +``\texttt{scons -C ~/ctoaster.carrotcake}'' and all your usual compiler error message chasing commands will work just right. (The \texttt{-C} argument to the \texttt{scons} program is the same as that for \texttt{make}: it tells SCons to change to a particular directory @@ -576,8 +576,8 @@ \section{Primary workflow} \begin{enumerate} \item{Do \texttt{git checkout -b } to create a git topic branch to work on.} - \item{Edit files under \texttt{~/ctoaster.cupcake/src}.} - \item{Test model build by running \texttt{scons -C ~/ctoaster.cupcake}.} + \item{Edit files under \texttt{~/ctoaster.carrotcake/src}.} + \item{Test model build by running \texttt{scons -C ~/ctoaster.carrotcake}.} \item{Set up test jobs using \texttt{new-job}.} \item{Run jobs in their job directories using \texttt{./go run} (uses development code).} @@ -614,7 +614,7 @@ \section{Model input parameter handling} configuration files and with configuration files that do not specify values for newly introduced parameter names. The way that this works is that the default namelists for each module -(e.g. 
\texttt{~/ctoaster.cupcake/src/embm/embm-defaults.nml}) contain a default +(e.g. \texttt{~/ctoaster.carrotcake/src/embm/embm-defaults.nml}) contain a default value for every parameter appearing in the relevant namelist in the FORTRAN code. The \emph{new-job} script then ignores any parameters appearing in configuration files whose names do not occur in the @@ -656,7 +656,7 @@ \section{Recommended Git working practices} work on something else. This ``one repository, many branches'' style of working is very powerful.} \item[Repository forks and pull requests]{Instead of just cloning - the \texttt{ctoaster.cupcake} repository and pushing changes directly to it + the \texttt{ctoaster.carrotcake} repository and pushing changes directly to it (which is only possible if you're one of the owners of the repository), create a personal \emph{fork} of the repository on GitHub and do your work in that. You set up the main repository @@ -698,7 +698,7 @@ \section{Platform files} The build system has a simple facility for managing compiler paths and options and the location of NetCDF libraries for building \textbf{cTOASTER} on different platforms. This mechanism is based on ``platform files'' in -this directory \texttt{~/ctoaster.cupcake/platforms}, each of which is named +this directory \texttt{~/ctoaster.carrotcake/platforms}, each of which is named after the hostname of the machine it's for, or the host and compiler combination (e.g. for my machine \texttt{seneca}, something like \texttt{seneca-gfortran} or \texttt{seneca-ifort}). (There is also a @@ -770,18 +770,18 @@ \section{Platform files} Porting \textbf{cTOASTER} to a new platform should require little more than making a new platform file: just copy an existing one, ideally one that uses the same compiler, and edit the locations of the NetCDF libraries. -(The platform file should live in \texttt{~/ctoaster.cupcake/platforms} and its +(The platform file should live in \texttt{~/ctoaster.carrotcake/platforms} and its name should be whatever is returned from the Linux \texttt{hostname} command.) It you want to use different compilers on the same platform, just create multiple platform files called \texttt{-} -- you'll probably also want to have a default platform with just the hostname so that you can do builds without setting the platform explicitly. If you want to do builds -directly in the \texttt{~/ctoaster.cupcake} directory using a non-default +directly in the \texttt{~/ctoaster.carrotcake} directory using a non-default compiler, you can either just move the platform files around in -\texttt{~/ctoaster.cupcake/platforms} so that the default file for your platform +\texttt{~/ctoaster.carrotcake/platforms} so that the default file for your platform uses the compiler you want, or (not really recommended) you can create -a file called \texttt{~/ctoaster.cupcake/config/platform-name} containing the +a file called \texttt{~/ctoaster.carrotcake/config/platform-name} containing the name of the platform you want to use\footnote{This suborns the platform selection method used in builds as run from the \texttt{go} script, and the reason it's not really recommended is that it will @@ -807,13 +807,13 @@ \section{Python installation and shell script wrappers} All the configuration and build scripts require Python 3 (and called as \texttt{python3}). -The main Python scripts all live in \texttt{~/ctoaster.cupcake/tools}. +The main Python scripts all live in \texttt{~/ctoaster.carrotcake/tools}. 
\\The \texttt{setup-ctoaster}, \texttt{new-job}, \texttt{go} and \texttt{tests} programs are all shell scripts that invoke \texttt{python3}. \section{Model versions, repositories and development code} -The whole story with the \texttt{~/ctoaster.cupcake-jobs/MODELS} directory +The whole story with the \texttt{~/ctoaster.carrotcake-jobs/MODELS} directory hierarchy for model builds is a little complicated, but it solves a number of related problems: \begin{enumerate} @@ -830,14 +830,14 @@ \section{Model versions, repositories and development code} \end{enumerate} When a specific model version is required for a job, the -\texttt{ctoaster.cupcake} source repository is unpacked into a directory under -\texttt{~/ctoaster.cupcake-jobs/MODELS/REPOS} at exactly that version and the +\texttt{ctoaster.carrotcake} source repository is unpacked into a directory under +\texttt{~/ctoaster.carrotcake-jobs/MODELS/REPOS} at exactly that version and the source tree and configuration scripts for that model version are used --- this means that even if your main \texttt{~/ctoaster.cupcake} directory, for -example, is at version \texttt{cupcake-3.5}, if you run the -\texttt{new-job} script telling it to use version \texttt{cupcake-1.0} +-- this means that even if your main \texttt{~/ctoaster.carrotcake} directory, for +example, is at version \texttt{carrotcake-3.5}, if you run the +\texttt{new-job} script telling it to use version \texttt{carrotcake-1.0} for the new job, \emph{all of the configuration and build steps} for -the new job will be done with the \texttt{cupcake-1.0} versions of the +the new job will be done with the \texttt{carrotcake-1.0} versions of the model and configuration scripts. This allows for perfect reproducibility of jobs between model versions. @@ -885,7 +885,7 @@ \section{Job hashing} When the \texttt{go} script needs to work out which directory to use under -\\\texttt{~/ctoaster.cupcake-jobs/MODELS} to use for the model for the +\\\texttt{~/ctoaster.carrotcake-jobs/MODELS} to use for the model for the current job, it reads the \texttt{jobs.py} file and turns the coordinate definitions into a canonical form (basically just by stripping whitespace and line endings, and sorting the variable names, @@ -899,25 +899,25 @@ \section{SConstruct organisation} The way that the \textbf{cTOASTER} build system using SConstruct is a little bit complicated, mostly in order to manage the segregation of different model builds as described above. The main -\texttt{~/ctoaster.cupcake/SConstruct} SCons file depends on reading some +\texttt{~/ctoaster.carrotcake/SConstruct} SCons file depends on reading some supporting files from the model build directory (which is either one -of the directories under \texttt{~/ctoaster.cupcake-jobs/MODELS} or, for the -degenerate case of testing model compilation, \texttt{~/ctoaster.cupcake}). +of the directories under \texttt{~/ctoaster.carrotcake-jobs/MODELS} or, for the +degenerate case of testing model compilation, \texttt{~/ctoaster.carrotcake}). 
Those supporting files are the \texttt{job.py} file that defines the model coordinate sizes, described in the previous section, and a file called \texttt{version.py} that tells the \texttt{SConstruct} script -where to find the model source code (\texttt{~/ctoaster.cupcake/src} for +where to find the model source code (\texttt{~/ctoaster.carrotcake/src} for development builds, or a directory under -\\\texttt{~/ctoaster.cupcake-jobs/MODELS/REPOS} for builds using a specified model +\\\texttt{~/ctoaster.carrotcake-jobs/MODELS/REPOS} for builds using a specified model version) and build scripts and the type of build to perform (e.g. \texttt{ship} or \texttt{debug}). If you look in one of the model build directories under -\texttt{~/ctoaster.cupcake-jobs/MODELS}, you'll find that that's more or less +\texttt{~/ctoaster.carrotcake-jobs/MODELS}, you'll find that that's more or less all there is there, apart from the \texttt{SConstruct} file, the \texttt{build} directory where compilation output goes, a -\texttt{build.log} file and the \texttt{cupcake.exe} executable output. +\texttt{build.log} file and the \texttt{carrotcake.exe} executable output. Setting things up this way means that model source code and build results are always kept seperate and there should never be any confusion. @@ -936,10 +936,10 @@ \section{Data file setup for jobs} configuration scripts. Because of the way that the namelist parameters are defined for some of the \textbf{cTOASTER} model components, it's not possible to determine exactly which model input files (from -\texttt{~/ctoaster.cupcake/data} or from -\\\texttt{~/ctoaster.cupcake-data/forcings}) are +\texttt{~/ctoaster.carrotcake/data} or from +\\\texttt{~/ctoaster.carrotcake-data/forcings}) are required to run a particular job. That's a bit of a problem, since -the idea is that each job directory under \texttt{~/ctoaster.cupcake-jobs} +the idea is that each job directory under \texttt{~/ctoaster.carrotcake-jobs} should be self-contained so that you could tar them up and send them to someone else for them to duplicate the job you were running, or for archiving purposes. This is also important for producing @@ -949,7 +949,7 @@ \section{Data file setup for jobs} requires a little bit of ingenuity. The \texttt{copy\_data\_files} routine (in -\texttt{~/ctoaster.cupcake/scripts/config\_utils.py}) uses some simple +\texttt{~/ctoaster.carrotcake/scripts/config\_utils.py}) uses some simple heuristics to figure out what input files might be needed. For each \textbf{cTOASTER} component, it extracts a list of candidate filenames from the namelist for that component -- basically all string-link parameters @@ -958,11 +958,11 @@ \section{Data file setup for jobs} going on to the next step -- in each case, if a candidate is matched, the match is copied to the job's \texttt{input} directory: \begin{enumerate} - \item{Look in the \texttt{~/ctoaster.cupcake/data} sub-directory for the + \item{Look in the \texttt{~/ctoaster.carrotcake/data} sub-directory for the relevant model component for an exact match to the candidate.} - \item{Look in \texttt{~/ctoaster.cupcake-data/forcings} for an exact match to + \item{Look in \texttt{~/ctoaster.carrotcake-data/forcings} for an exact match to the candidate.} - \item{Look in the \texttt{~/ctoaster.cupcake/data} sub-directory for the + \item{Look in the \texttt{~/ctoaster.carrotcake/data} sub-directory for the relevant model component for partial matches to the candidate, i.e. 
files whose name contain the string we're looking for but aren't an exact match.} @@ -986,20 +986,20 @@ \section{Data file setup for jobs} %====================================================================== \appendix -\chapter{(Some) teaching labs updated for \texttt{cupcake}} +\chapter{(Some) teaching labs updated for \texttt{carrotcake}} A few configuration and data files needed some fixups for these things to work -- these changes have all been committed to the -\texttt{ctoaster.cupcake-data} repository. +\texttt{ctoaster.carrotcake-data} repository. \section{Session \#0000} \subsection*{Section 1} \begin{verbatim} - git clone https://github.com/genie-model/ctoaster.cupcake.git - cd ~/ctoaster.cupcake - ./setup-ctoaster.cupcake + git clone https://github.com/genie-model/ctoaster.carrotcake.git + cd ~/ctoaster.carrotcake + ./setup-ctoaster.carrotcake ./tests run basic \end{verbatim} @@ -1008,7 +1008,7 @@ \subsection*{Sections 2--6} \begin{verbatim} ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE -u LABS/LAB_0.snowball ... ... LAB_0.snowball 10 - cd ~/ctoaster.cupcake-jobs/LAB_0.snowball + cd ~/ctoaster.carrotcake-jobs/LAB_0.snowball ./go run cd output/biogem cat biogem_series_ocn_temp.res @@ -1017,22 +1017,22 @@ \subsection*{Sections 2--6} \subsection*{Section 7} \begin{verbatim} - cd ~/ctoaster.cupcake_output - wget http://www.seao2.info/ctoaster.cupcake/labs/UoB.2013/... + cd ~/ctoaster.carrotcake_output + wget http://www.seao2.info/ctoaster.carrotcake/labs/UoB.2013/... ...130328.p0650e.LiCa.OHM10.SPIN0.tar.gz tar xzf 130328.p0650e.LiCa.OHM10.SPIN0.tar.gz - cd ~/ctoaster.cupcake + cd ~/ctoaster.carrotcake ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE -u LABS/LAB_0.snowball ... ... LAB_0.snowball-from-restart 100 ... ... -r 130328.p0650e.LiCa.OHM10.SPIN0 --old-restart - cd ~/ctoaster.cupcake-jobs/LAB_0.snowball-from-restart + cd ~/ctoaster.carrotcake-jobs/LAB_0.snowball-from-restart ./go run \end{verbatim} \subsection*{Section 8} \begin{verbatim} - cd ~/ctoaster.cupcake-data/user-configs + cd ~/ctoaster.carrotcake-data/user-configs cp LAB_0.snowball LAB_0.snowball-experiment \end{verbatim} @@ -1040,23 +1040,23 @@ \subsection*{Section 8} change the \texttt{ea\_radfor\_scl\_co2} variable (to 10.0, say). \begin{verbatim} - cd ~/ctoaster.cupcake + cd ~/ctoaster.carrotcake ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS/LAB_0.snowball-experiment ... ... LAB_0.snowball-experiment-1 100 - cd ~/ctoaster.cupcake-jobs/LAB_0.snowball-experiment-1 + cd ~/ctoaster.carrotcake-jobs/LAB_0.snowball-experiment-1 ./go run \end{verbatim} It's easy to make another job to extend this simulation -- just restart from the end of the last job: \begin{verbatim} - cd ~/ctoaster.cupcake + cd ~/ctoaster.carrotcake ./new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS/LAB_0.snowball-experiment ... ... LAB_0.snowball-experiment-1-extend 100 ... ... -r LAB_0.snowball-experiment-1 - cd ~/ctoaster.cupcake-jobs/LAB_0.snowball-experiment-1-extend + cd ~/ctoaster.carrotcake-jobs/LAB_0.snowball-experiment-1-extend ./go run \end{verbatim} You can do this indefinitely... @@ -1066,11 +1066,11 @@ \section{Session \#0001} \subsection*{Section 1.3} \begin{verbatim} - cd ~/ctoaster.cupcake_output - wget http://www.seao2.info/ctoaster.cupcake/labs/UoB.2013/... + cd ~/ctoaster.carrotcake_output + wget http://www.seao2.info/ctoaster.carrotcake/labs/UoB.2013/... 
...EXAMPLE.worjh2.PO4Fe.SPIN.tar.gz tar xzf EXAMPLE.worjh2.PO4Fe.SPIN.tar.gz - cd ~/ctoaster.cupcake-data/user-configs/LABS + cd ~/ctoaster.carrotcake-data/user-configs/LABS cp LAB_1.colorinjection LAB_1.colorinjection-experiment \end{verbatim} @@ -1082,7 +1082,7 @@ \subsection*{Section 1.3} ... -u LABS/LAB_1.colorinjection-experiment ... ... LAB_1.colorinjection 20 ... ... -r EXAMPLE.worjh2.PO4Fe.SPIN --old-restart - cd ~/ctoaster.cupcake-jobs/LAB_1.colorinjection + cd ~/ctoaster.carrotcake-jobs/LAB_1.colorinjection ./go run \end{verbatim} @@ -1092,7 +1092,7 @@ \subsection*{Section 1.5} ./new-job -b cgenie.eb_go_gs_ac_bg.worjh2.rb ... ... -u LABS/LAB_1.hosing LAB_1.hosing 20 ... ... -r EXAMPLE.worjh2.PO4Fe.SPIN --old-restart - cd ~/ctoaster.cupcake-jobs/LAB_1.hosing + cd ~/ctoaster.carrotcake-jobs/LAB_1.hosing ./go run \end{verbatim} @@ -1104,7 +1104,7 @@ \subsection*{Section 1.1} ./new-job -O -b cgenie.eb_go_gs_ac_bg.worjh2.BASEFe ... ... -u LABS/LAB_2.CO2emissions LAB_2.CO2emissions 20 ... ... -r EXAMPLE.worjh2.PO4Fe.SPIN --old-restart - cd ~/ctoaster.cupcake-jobs/LAB_2.CO2emissions + cd ~/ctoaster.carrotcake-jobs/LAB_2.CO2emissions ./go run \end{verbatim} @@ -1114,7 +1114,7 @@ \subsection*{Section 1.2} ./new-job -O -b cgenie.eb_go_gs_ac_bg.worjh2.BASEFe ... ... -u LABS/LAB_2.CONTROL LAB_2.CONTROL 20 ... ... -r EXAMPLE.worjh2.PO4Fe.SPIN --old-restart - cd ~/ctoaster.cupcake-jobs/LAB_2.CONTROL + cd ~/ctoaster.carrotcake-jobs/LAB_2.CONTROL ./go run \end{verbatim} diff --git a/doc/cupcake-optimisation.tex b/doc/cupcake-optimisation.tex index a60e3c69..9d84a57f 100644 --- a/doc/cupcake-optimisation.tex +++ b/doc/cupcake-optimisation.tex @@ -29,7 +29,7 @@ \newfloat{listing}{tbp}{lop} \floatname{listing}{Listing} -\title{Optimisation work for GENIE \texttt{cupcake}} +\title{Optimisation work for GENIE \texttt{carrotcake}} \author{Ian~Ross} \date{2 April 2015} diff --git a/doc/cupcake-windows.tex b/doc/cupcake-windows.tex index d37fd077..0c55c920 100644 --- a/doc/cupcake-windows.tex +++ b/doc/cupcake-windows.tex @@ -29,7 +29,7 @@ \newfloat{listing}{tbp}{lop} \floatname{listing}{Listing} -\title{GENIE \texttt{cupcake} on Windows} +\title{GENIE \texttt{carrotcake} on Windows} \author{Ian~Ross} \date{4 March 2015} @@ -40,7 +40,7 @@ \maketitle -This document describes how to use the new \texttt{cupcake} version of +This document describes how to use the new \texttt{carrotcake} version of the GENIE model on Windows. Documentation is divided into two sections, one for users of the model and one for those who wish to modify the model. @@ -210,7 +210,7 @@ \section{Creating new jobs} -c CONFIG --config=CONFIG \end{verbatim} Full configuration name (this is mostly used for conversions of -pre-\texttt{cupcake} tests) -- full configurations are stored in the +pre-\texttt{carrotcake} tests) -- full configurations are stored in the \texttt{C:\textbackslash{}cgenie-data\textbackslash{}full-configs} directory. @@ -236,7 +236,7 @@ \section{Creating new jobs} \begin{verbatim} --old-restart \end{verbatim} -It may sometimes be useful to restart from an old pre-\texttt{cupcake} +It may sometimes be useful to restart from an old pre-\texttt{carrotcake} job. This flag indicates that the job name supplied to the \texttt{-r} flag is the name of an old GENIE job whose output can be found in the \texttt{C:\textbackslash{}cgenie\_output} directory. 
@@ -426,12 +426,12 @@ \section{Managing model versions} To configure a job to use a different model version from the default, simply add a \texttt{-v} flag to \texttt{new-job} specifying the model version you want to use. For example, to configure a job to use the -\texttt{cupcake-1.0} version of the model, use something like the +\texttt{carrotcake-1.0} version of the model, use something like the following command: \begin{verbatim} new-job -b cgenie.eb_go_gs_ac_bg.p0650e.NONE ... ... -u LABS\LAB_0.snowball snowball 10 ... - ... -v cupcake-1.0 + ... -v carrotcake-1.0 \end{verbatim} Within a job directory, you can see what model version the job was configured with by looking at the contents of the @@ -446,7 +446,7 @@ \chapter{For GENIE developers} For developers of GENIE, there are a few extra things to know beyond what's needed to run the model. Most of this is covered in the -\emph{GENIE \texttt{cupcake} Configuration and Build System} document. +\emph{GENIE \texttt{carrotcake} Configuration and Build System} document. The only Windows-specific feature to be covered here is how to debug the model in Visual Studio. This is a little complicated and there diff --git a/doc/cupcake.install.tex b/doc/cupcake.install.tex index de30e10a..00f878a7 100644 --- a/doc/cupcake.install.tex +++ b/doc/cupcake.install.tex @@ -24,7 +24,7 @@ %\addtolength{\oddsidemargin}{1.0cm} %\addtolength{\bottommargin}{1.0cm} -\title{cupcake installation HOW-TO\vspace{-8mm}} +\title{carrotcake installation HOW-TO\vspace{-8mm}} \author{} \date{\today} \usepackage[normalem]{ulem} @@ -41,7 +41,7 @@ % --- cupcate --------------------------------------------------------------------------- %---------------------------------------------------------------------------------------- -\noindent This is a brief guide to installing \textbf{cTOASTER.cupcake} under \textbf{Ubuntu}. +\noindent This is a brief guide to installing \textbf{cTOASTER.carrotcake} under \textbf{Ubuntu}. These instructions are valid for a fresh install of \textbf{Ubuntu} distribution version 22.04 LTS ('Jammy Jellyfish'). For a different distribution or more established installation, different or fewer respectively components may be needed to be installed and may require a little trial-and-error. @@ -70,12 +70,12 @@ \subsection{Installation} \vspace{4pt} \item \textbf{Get the code!} \vspace{2pt} -\\You may as well start off by cloning\footnote{If your system does not know what \textbf{git} is (it should be present by default on Ubuntu 22.04): \texttt{sudo apt install git}} the \textbf{cupcake} code (although you could equally do this last). +\\You may as well start off by cloning\footnote{If your system does not know what \textbf{git} is (it should be present by default on Ubuntu 22.04): \texttt{sudo apt install git}} the \textbf{carrotcake} code (although you could equally do this last). From your home directory: \vspace{-2pt} \begin{verbatim} -git clone https://github.com/derpycode/ctoaster.cupcake.git +git clone https://github.com/derpycode/ctoaster.carrotcake.git \end{verbatim} \end{enumerate} @@ -86,7 +86,7 @@ \subsection{Installation} \vspace{2pt} -\noindent When trying to run \texttt{ctoaster.cupcake}, it checks for a valid \textbf{netCDF} installation, so this may as well be your first step ... except you will need the appropriate compilers etc. if you do not already have them ... +\noindent When trying to run \texttt{ctoaster.carrotcake}, it checks for a valid \textbf{netCDF} installation, so this may as well be your first step ... 
except you will need the appropriate compilers etc. if you do not already have them ... %------------------------------------------------ \begin{enumerate}[noitemsep] diff --git a/horizontal-pod-autoscaler.yaml b/horizontal-pod-autoscaler.yaml new file mode 100644 index 00000000..8c7d0899 --- /dev/null +++ b/horizontal-pod-autoscaler.yaml @@ -0,0 +1,18 @@ +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: ctoaster-backend-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: ctoaster-backend + minReplicas: 2 + maxReplicas: 5 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 60 diff --git a/platforms/LINUX b/platforms/LINUX index f47f4b12..4f3fab6b 100644 --- a/platforms/LINUX +++ b/platforms/LINUX @@ -54,7 +54,7 @@ netcdf_base = None # Store as a pair (tuple) of include and lib paths as distro # For example if you have installed the NetCDF libraries yourself under /home/youruser/netcdf # you should add ('home/youruser/netcdf/include','/home/youruser/netcdf/lib') to the list below: # -poss_netcdf = [('/usr/include','/usr/lib/x86_64-linux-gnu'), ('/usr/lib64/gfortran/modules','/usr/lib64')] +poss_netcdf = [('/usr/local/include','/usr/local/lib'), ('/usr/include','/usr/lib/x86_64-linux-gnu'), ('/usr/lib64/gfortran/modules','/usr/lib64')] for poss in poss_netcdf: if (os.path.exists(os.path.join(poss[0], 'netcdf.mod')) and diff --git a/poc/tkinter-f2py2/Makefile b/poc/tkinter-f2py2/Makefile index bc76b6a0..2fcb89f4 100644 --- a/poc/tkinter-f2py2/Makefile +++ b/poc/tkinter-f2py2/Makefile @@ -1,9 +1,9 @@ # A simple makefile using f2py -all: cupcake.so +all: carrotcake.so -cupcake.so: model.f90 - f2py -c -m cupcake $^ +carrotcake.so: model.f90 + f2py -c -m carrotcake $^ .PHONY: all clean spotless diff --git a/poc/tkinter-f2py2/gui.py b/poc/tkinter-f2py2/gui.py index 00f82085..7dc8adbd 100644 --- a/poc/tkinter-f2py2/gui.py +++ b/poc/tkinter-f2py2/gui.py @@ -6,7 +6,7 @@ import matplotlib.pyplot as plt from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg # import the shared object created using f2py -import cupcake +import carrotcake def close_window(): root.destroy() @@ -15,15 +15,15 @@ def close_window(): root = tk.Tk() # call the Fortran init routine -cupcake.initialise('runtime_params.nml') -offset_step = (2 * math.pi) / cupcake.runtime_parameters.nx +carrotcake.initialise('runtime_params.nml') +offset_step = (2 * math.pi) / carrotcake.runtime_parameters.nx # start assembling the graph we'll plot in the GUI # We'll use Python's Matplotlib to create the plot # and tkinter to display it. 
figure = plt.Figure(figsize=(5,4), dpi=100) ax = figure.add_subplot(111) -root.title('cupcake GUI') +root.title('carrotcake GUI') line = FigureCanvasTkAgg(figure, root) line.show() line.get_tk_widget().pack(side=tk.LEFT, fill=tk.BOTH) @@ -31,12 +31,12 @@ def close_window(): button.pack(side=tk.BOTTOM) # The main timestep loop -for ii in range (0, cupcake.runtime_parameters.koverall): +for ii in range (0, carrotcake.runtime_parameters.koverall): print "loop iteration: ", ii - cupcake.timestep(ii * offset_step) + carrotcake.timestep(ii * offset_step) # create dataframe (using pandas) for subsequent use by Matplotlib - data = {'xvals': cupcake.global_variables.xvals, - 'yvals': cupcake.global_variables.yvals} + data = {'xvals': carrotcake.global_variables.xvals, + 'yvals': carrotcake.global_variables.yvals} df = DataFrame(data, columns = ['xvals','yvals']) # plot a figure from the dataframe and put into a tkinter window df = df[['xvals','yvals']].groupby('xvals').sum() @@ -46,7 +46,7 @@ def close_window(): line.draw() time.sleep(3) -cupcake.finalise() +carrotcake.finalise() # An infinite loop root.mainloop() diff --git a/requirements.txt b/requirements.txt index 3ff4fc6a..ebec3c42 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,37 @@ +annotated-types==0.6.0 +anyio==4.3.0 +certifi==2024.2.2 +charset-normalizer==3.3.2 +click==8.1.7 contourpy==1.1.1 cycler==0.12.1 +exceptiongroup==1.2.1 +fastapi==0.110.3 fonttools==4.49.0 -importlib_resources==6.1.3 +fuzzywuzzy==0.18.0 +h11==0.14.0 +idna==3.7 +importlib-resources==6.1.1 kiwisolver==1.4.5 +Levenshtein==0.25.0 matplotlib==3.7.5 numpy==1.24.4 packaging==23.2 pillow==10.2.0 -pyparsing==3.1.2 -python-dateutil==2.9.0.post0 +pydantic==2.7.1 +pydantic_core==2.18.2 +pyparsing==3.1.1 +python-dateutil==2.8.2 +python-Levenshtein==0.25.0 +rapidfuzz==3.6.2 +requests==2.31.0 SCons==4.6.0.post1 six==1.16.0 +sniffio==1.3.1 +starlette==0.37.2 +typing==3.7.4.3 +typing_extensions==4.11.0 +urllib3==2.2.1 +uvicorn==0.29.0 zipp==3.17.0 + diff --git a/run-cupcake b/run-carrotcake similarity index 80% rename from run-cupcake rename to run-carrotcake index 941e0f12..c4170b43 100755 --- a/run-cupcake +++ b/run-carrotcake @@ -4,4 +4,4 @@ if [ ! 
-f ~/.ctoasterrc ]; then exit 0 fi ctoaster_root=`grep ctoaster_root ~/.ctoasterrc | cut -d: -f2 | sed -e 's/^ *//'` -python3 $ctoaster_root/tools/run-cupcake.py $* +python3 $ctoaster_root/tools/run-carrotcake.py $* diff --git a/run-cupcake.bat b/run-carrotcake.bat similarity index 88% rename from run-cupcake.bat rename to run-carrotcake.bat index 2971b2ec..f4694b93 100644 --- a/run-cupcake.bat +++ b/run-carrotcake.bat @@ -7,6 +7,6 @@ SET _find_cmd=FINDSTR ctoaster_root "%USERPROFILE%\.ctoasterrc" FOR /F "tokens=2" %%r IN ('%_find_cmd%') DO ( CALL %%r\tools\find_python.bat IF NOT DEFINED PYTHON EXIT /B 0 - %PYTHON% %%r\tools\run-cupcake.py %* + %PYTHON% %%r\tools\run-carrotcake.py %* EXIT /B 0 ) diff --git a/service.yaml b/service.yaml new file mode 100644 index 00000000..030ac80e --- /dev/null +++ b/service.yaml @@ -0,0 +1,13 @@ +# service.yaml +apiVersion: v1 +kind: Service +metadata: + name: ctoaster-backend-service +spec: + selector: + app: ctoaster-backend + ports: + - protocol: TCP + port: 80 + targetPort: 8000 + type: LoadBalancer # Changed from ClusterIP to LoadBalancer \ No newline at end of file diff --git a/src/SConscript b/src/SConscript index 4eadf36e..45a08163 100644 --- a/src/SConscript +++ b/src/SConscript @@ -11,7 +11,7 @@ for sd in subdirs: res = env.SConscript(consFile) allobjs += res['objs'] allsrcs += res['srcs'] -allsrcs = list(map(str, Glob('cupcake.f90'))) + list(map(str, allsrcs)) +allsrcs = list(map(str, Glob('carrotcake.f90'))) + list(map(str, allsrcs)) ###print(allobjs) @@ -26,10 +26,10 @@ env.Program('nccompare.exe', [os.path.join('tools', 'nccompare.f90')]) ###print('***') if P.system() != 'Windows': - env.Program('cupcake.exe', ['cupcake.f90'] + objs) + env.Program('carrotcake.exe', ['carrotcake.f90'] + objs) else: - if build_type == 'debug': env.Replace(PDB='cupcake-debug.pdb') - exe = env.Program('cupcake.exe', ['cupcake.f90'] + objs) + if build_type == 'debug': env.Replace(PDB='carrotcake-debug.pdb') + exe = env.Program('carrotcake.exe', ['carrotcake.f90'] + objs) if len(exe) > 1: exe = exe[0] var = 'Debug' if build_type == 'debug' else 'Release' env.MSVSProject(target = 'ctoaster-scons' + env['MSVSPROJECTSUFFIX'], diff --git a/src/cupcake.f90 b/src/carrotcake.f90 similarity index 100% rename from src/cupcake.f90 rename to src/carrotcake.f90 diff --git a/src/common/gem_util.f90 b/src/common/gem_util.f90 index 2b79e732..5e62ee4a 100644 --- a/src/common/gem_util.f90 +++ b/src/common/gem_util.f90 @@ -424,7 +424,6 @@ END SUBROUTINE sub_check_fileformat ! REPORT ERROR ! (generic error reporting subroutine) SUBROUTINE sub_report_error(dum_mod,dum_proc,dum_mes,dum_act,dum_data,dum_fatal) - USE genie_global, ONLY: write_status IMPLICIT NONE ! dummy arguments CHARACTER(LEN=*),INTENT(in)::dum_mod @@ -454,7 +453,8 @@ SUBROUTINE sub_report_error(dum_mod,dum_proc,dum_mes,dum_act,dum_data,dum_fatal) PRINT*,' ' PRINT*,'*********** END ************' PRINT*,' ' - CALL write_status('ERRORED') + ! CALL write_status('ERRORED') ! 
NOTE: requires USE genie_global, ONLY: write_status + stop ELSE PRINT*,' ' PRINT*,'*** WARNING ***' diff --git a/tools/REST.py b/tools/REST.py new file mode 100644 index 00000000..fe4f8538 --- /dev/null +++ b/tools/REST.py @@ -0,0 +1,1009 @@ +import asyncio +import datetime +import logging +import os +import shutil +import subprocess as sp +import sys +import time + +from fastapi import FastAPI, HTTPException, Request, Query +from fastapi.middleware.cors import CORSMiddleware +from starlette.background import BackgroundTasks +from starlette.responses import StreamingResponse + +from tools.utils import read_ctoaster_config + +# Initialize the configuration +read_ctoaster_config() + +# Ensure global variables are initialized +if not read_ctoaster_config(): + raise RuntimeError("Failed to read ctoaster configuration") + +from tools.utils import ctoaster_data, ctoaster_jobs, ctoaster_root, ctoaster_version + +app = FastAPI() + +# CORS configuration +origins = ["http://localhost:5001", "*"] # React development server + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +@app.get("/jobs") +def list_jobs(): + try: + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_list = os.listdir(ctoaster_jobs) + jobs = [] + for job in job_list: + job_path = os.path.join(ctoaster_jobs, job) + if os.path.isdir(job_path) and job.strip() != "MODELS": + jobs.append({"name": job, "path": job_path}) + return {"jobs": jobs} + except Exception as e: + return {"error": str(e)} + + +# Global variable to store the currently selected job name +selected_job_name = None + +# Global variable to store the currently selected job name +selected_job_name = None + + +@app.get("/job/{job_name}") +def get_job_details(job_name: str): + global selected_job_name + selected_job_name = job_name # Store the selected job name + try: + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + logger.info(f"Job not found: {job_path}") + return {"error": "Job not found"} + + # Determine job status + status = "UNCONFIGURED" + if os.path.exists(os.path.join(job_path, "data_genie")): + status = "RUNNABLE" + if os.path.exists(os.path.join(job_path, "status")): + with open(os.path.join(job_path, "status")) as f: + status_line = f.readline().strip() + status = status_line.split()[0] if status_line else "ERROR" + + # Determine run length and T100 from the config file + run_length = "n/a" + t100 = "n/a" + config_path = os.path.join(job_path, "config", "config") + if os.path.exists(config_path): + with open(config_path) as f: + for line in f: + if line.startswith("run_length:"): + run_length = line.split(":")[1].strip() + if line.startswith("t100:"): + t100 = line.split(":")[1].strip().lower() == "true" + + job_details = { + "name": job_name, + "path": job_path, + "status": status, + "run_length": run_length, + "t100": "true" if t100 else "false", + } + + logger.info(f"Job details retrieved: {job_details}") + + return {"job": job_details} + except Exception as e: + logger.error(f"Error retrieving job details: {str(e)}") + return {"error": str(e)} + + +@app.delete("/delete-job") +def delete_job(): + global selected_job_name + try: + if not selected_job_name: + raise HTTPException(status_code=400, detail="No job 
selected") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, selected_job_name) + + if not os.path.isdir(job_path): + logger.info(f"Job not found: {job_path}") + return {"error": "Job not found"} + + # Delete the job directory + shutil.rmtree(job_path) + + local_job_name = selected_job_name + + # Clear the selected job name + selected_job_name = None + + logger.info(f"Job deleted: {job_path}") + return {"message": f"Job '{local_job_name}' deleted successfully"} + + except Exception as e: + logger.error(f"Error deleting job: {str(e)}") + raise HTTPException(status_code=500, detail=f"Error deleting job: {str(e)}") + # if not selected_job_name: + # raise HTTPException(status_code=400, detail="No job selected") + + # from utils import ctoaster_jobs + + # if ctoaster_jobs is None: + # raise ValueError("ctoaster_jobs is not defined") + + # job_path = os.path.join(ctoaster_jobs, selected_job_name) + + # if not os.path.isdir(job_path): + # logger.info(f"Job not found: {job_path}") + # return {"error": "Job not found"} + + # try: + # os.rmdir(job_path) + # except Exception as e: + # logger.error(f"Error deleting job: {str(e)}") + # return {"error": f"Error deleting job: {str(e)}"} + + # return {"message": f"Job '{selected_job_name}' deleted successfully"} + + +import shutil + +@app.post("/add-job") +async def add_job(request: Request): + data = await request.json() + job_name = data.get("job_name") + + if not job_name: + raise HTTPException(status_code=400, detail="Job name is required") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_dir = os.path.join(ctoaster_jobs, job_name) + if os.path.exists(job_dir): + raise HTTPException(status_code=400, detail="Job already exists") + + # Create the job directory + try: + os.makedirs(os.path.join(job_dir, "config")) + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Could not create job directory: {str(e)}" + ) + + # Create the main config file + config_path = os.path.join(job_dir, "config", "config") + try: + with open(config_path, "w") as config_file: + config_file.write( + "base_config: ?\nuser_config: ?\nrun_length: ?\nt100: ?\n" + ) + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Could not write configuration file: {str(e)}" + ) + + return {"status": "success", "message": f"Job '{job_name}' created successfully"} + + +@app.get("/run-segments/{job_name}") +def get_run_segments(job_name: str): + try: + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + segments_dir = os.path.join(job_path, "config", "segments") + + # Function to read the segments from the config directory + def read_segments(): + segments = [] + if os.path.exists(segments_dir): + for segment_id in os.listdir(segments_dir): + segment_path = os.path.join(segments_dir, segment_id) + if os.path.isdir(segment_path): + config_path = os.path.join(segment_path, "config") + if os.path.exists(config_path): + with open(config_path) as f: + for line in f: + if line.startswith("run_length:"): + run_length = int(line.split(":")[1].strip()) + segments.append((segment_id, run_length)) + return segments + + segments = read_segments() + + if not segments: + # Default case with a single segment. 
+ return {"run_segments": ["1: 1-END"]} + + # Generate strings for each segment in the form ": -" + res = [f"{i + 1}: {start}-{end}" for i, (start, end) in enumerate(segments)] + + # Add a final segment representing the next step after the last known segment. + final_step = segments[-1][1] + 1 # Assumes segments are sorted and non-empty + res.append(f"{len(segments) + 1}: {final_step}-END") + + # Reverse the list to match the original behavior and convert to a tuple. + return {"run_segments": tuple(reversed(res))} + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error fetching run segments: {str(e)}" + ) + + +@app.get("/base-configs") +def get_base_configs(): + try: + base_configs_dir = os.path.join(ctoaster_data, "base-configs") + base_configs = [ + f.rpartition(".")[0] + for f in os.listdir(base_configs_dir) + if f.endswith(".config") + ] + base_configs.sort() + return {"base_configs": base_configs} + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error fetching base configs: {str(e)}" + ) + + +@app.get("/user-configs") +def get_user_configs(): + try: + user_configs_dir = os.path.join(ctoaster_data, "user-configs") + user_configs = [] + for root, _, files in os.walk(user_configs_dir): + for file in files: + user_configs.append( + os.path.relpath(os.path.join(root, file), user_configs_dir) + ) + user_configs.sort() + return {"user_configs": user_configs} + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error fetching user configs: {str(e)}" + ) + + +@app.get("/completed-jobs") +async def get_completed_jobs(): + try: + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + completed_jobs = [] + # Iterate over all jobs in the jobs directory + for job_name in os.listdir(ctoaster_jobs): + job_path = os.path.join(ctoaster_jobs, job_name) + if os.path.isdir(job_path): + status_file = os.path.join(job_path, "status") + if os.path.exists(status_file): + status_parts = read_status_file(job_path) + if status_parts and status_parts[0] == "COMPLETE": + completed_jobs.append(job_name) + + return {"completed_jobs": completed_jobs} + except Exception as e: + logger.error(f"Error fetching completed jobs: {str(e)}") + raise HTTPException(status_code=500, detail=str(e)) + + +@app.get("/setup/{job_name}") +def get_setup(job_name: str): + try: + if ctoaster_jobs is None or ctoaster_data is None: + raise ValueError("ctoaster_jobs or ctoaster_data is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + logger.info(f"Job not found: {job_path}") + return {"error": "Job not found"} + + # Read the setup details from the config file + config_path = os.path.join(job_path, "config", "config") + if not os.path.exists(config_path): + raise ValueError("Config file not found") + + setup_details = { + "base_config": "", + "user_config": "", + "modifications": "", + "run_length": "n/a", + "restart_from": "", + } + + # Read from the main config file + with open(config_path) as f: + for line in f: + if line.startswith("base_config:"): + setup_details["base_config"] = line.split(":", 1)[1].strip() + elif line.startswith("user_config:"): + setup_details["user_config"] = line.split(":", 1)[1].strip() + elif line.startswith("run_length:"): + setup_details["run_length"] = line.split(":", 1)[1].strip() + elif line.startswith("restart:"): + setup_details["restart_from"] = line.split(":", 1)[1].strip() + + # Read modifications + mods_path = os.path.join(job_path, "config", 
"config_mods") + if os.path.exists(mods_path): + with open(mods_path) as f: + setup_details["modifications"] = f.read().strip() + + return {"setup": setup_details} + except Exception as e: + logger.error(f"Error retrieving setup details: {str(e)}") + return {"error": str(e)} + + +@app.post("/setup/{job_name}") +async def update_setup(job_name: str, request: Request): + try: + data = await request.json() + if ctoaster_jobs is None or ctoaster_data is None: + raise ValueError("ctoaster_jobs or ctoaster_data is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + logger.info(f"Job not found: {job_path}") + return {"error": "Job not found"} + + # Update the main config file + config_path = os.path.join(job_path, "config", "config") + if not os.path.exists(config_path): + raise ValueError("Config file not found") + + # Prepare the updated configuration data + base_config = data.get("base_config", "") + user_config = data.get("user_config", "") + modifications = data.get("modifications", "") + run_length = data.get("run_length", "n/a") + restart = data.get("restart_from", "") + if restart == "": + restart = None # Handle empty string as None + + with open(config_path, "w") as f: + if base_config: + f.write( + f"base_config_dir: {os.path.join(ctoaster_data, 'base-configs')}\n" + ) + f.write(f"base_config: {base_config}\n") + if user_config: + f.write( + f"user_config_dir: {os.path.join(ctoaster_data, 'user-configs')}\n" + ) + f.write(f"user_config: {user_config}\n") + if restart is not None: + f.write(f"restart: {restart}\n") + else: + f.write("restart: \n") + today = datetime.datetime.today().strftime("%Y-%m-%d %H:%M:%S") + f.write(f"config_date: {today}\n") + f.write(f"run_length: {run_length}\n") + + # Update the modifications file + mods_path = os.path.join(job_path, "config", "config_mods") + if modifications: + with open(mods_path, "w") as f: + f.write(modifications) + elif os.path.exists(mods_path): + os.remove(mods_path) + + # Regenerate the namelists + new_job_script = os.path.join(ctoaster_root, "tools", "new-job.py") + cmd = [ + sys.executable, + new_job_script, + "--gui", + "-b", + base_config, + "-u", + user_config, + "-j", + ctoaster_jobs, + job_name, + str(run_length), + ] + if modifications: + cmd.extend(["-m", mods_path]) + if restart: + cmd.extend(["--restart", restart]) + + try: + res = sp.check_output(cmd, stderr=sp.STDOUT, text=True).strip() + except sp.CalledProcessError as e: + res = f"ERR:Failed to run new-job script with error {e.output}" + raise ValueError(res) + except Exception as e: + res = f"ERR:Unexpected error {e}" + raise ValueError(res) + + if not res.startswith("OK"): + raise ValueError(res[4:]) + + return {"message": "Setup updated successfully"} + except Exception as e: + logger.error(f"Error updating setup details: {str(e)}") + return {"error": str(e)} + + +## Utility function used in run_job +def read_status_file(job_dir): + """ + Attempts to read the status file for a job, handling potential issues on Windows where + the file might be locked by another process. + + :param job_dir: The job directory containing the 'status' file. + :return: A list containing the status information or None if the file could not be read. 
+ """ + status = None + safety = 0 + while not status and safety < 1000: + try: + if safety != 0: + time.sleep(0.001) + safety += 1 + with open(os.path.join(job_dir, "status")) as fp: + status = fp.readline().strip().split() + except IOError: + pass # You may log the error here if needed + if safety == 1000: + print("Failed to read the status file after multiple attempts.") + return status + + +@app.post("/run-job") +async def run_job(): + global selected_job_name + try: + if not selected_job_name: + raise HTTPException(status_code=400, detail="No job selected") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, selected_job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + # Check if the job is in a runnable state + status = "UNCONFIGURED" + if os.path.exists(os.path.join(job_path, "data_genie")): + status = "RUNNABLE" + if os.path.exists(os.path.join(job_path, "status")): + # Use the read_status_file function to read the status + status_parts = read_status_file(job_path) + if status_parts: + status = status_parts[0] # The first element is the status + else: + status = "ERROR" + + if status not in ["RUNNABLE", "PAUSED"]: + raise HTTPException( + status_code=400, + detail=f"Job '{selected_job_name}' is not configured or runnable.", + ) + + # Correct path to check for the executable + exe = os.path.join( + ctoaster_jobs, + "MODELS", + ctoaster_version, # Replace with actual version variable or string + sys.platform.upper(), # Dynamically get platform information + "ship", + "carrotcake.exe", + ) + + # Check if executable exists + if not os.path.exists(exe): + raise HTTPException( + status_code=500, detail=f"Executable not found at {exe}" + ) + + # Copy the executable to the job directory + runexe = os.path.join(job_path, "carrotcake-ship.exe") + if os.path.exists(runexe): + os.remove(runexe) + shutil.copy(exe, runexe) + + # Handle resuming a paused job + command_file_path = os.path.join(job_path, "command") + if os.path.exists(command_file_path): + os.remove(command_file_path) + + if status == "PAUSED": + status_parts = read_status_file(job_path) + if status_parts and len(status_parts) >= 4: + _, koverall, _, genie_clock = status_parts[:4] + # Write the GUI_RESTART command to the command file + with open(command_file_path, "w") as command_file: + command_file.write(f"GUI_RESTART {koverall} {genie_clock}\n") + else: + raise HTTPException( + status_code=500, + detail="Status file does not contain the required parameters to resume the job.", + ) + + # Start executable and direct stdout and stderr to run.log in job directory + log_file_path = os.path.join(job_path, "run.log") + with open(log_file_path, "a") as log_file: + process = sp.Popen( + [runexe], cwd=job_path, stdout=log_file, stderr=sp.STDOUT + ) + + return {"message": f"Job '{selected_job_name}' is now running"} + except FileNotFoundError as fnfe: + error_message = f"File not found error: {str(fnfe)}" + logger.error(error_message) + raise HTTPException(status_code=500, detail=error_message) + except Exception as e: + error_message = f"Unexpected error running job '{selected_job_name}': {str(e)}" + logger.error(error_message) + raise HTTPException(status_code=500, detail=error_message) + + +@app.post("/pause-job") +async def pause_job(): + global selected_job_name + try: + if not selected_job_name: + raise HTTPException(status_code=400, detail="No job selected") + + if ctoaster_jobs is None: + raise 
ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, selected_job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + # Check if the job is currently running or paused + status_file_path = os.path.join(job_path, "status") + if not os.path.exists(status_file_path): + raise HTTPException(status_code=400, detail="Job status file not found") + + with open(status_file_path, "r") as status_file: + status_line = status_file.readline().strip() + if "PAUSED" in status_line: + raise HTTPException(status_code=400, detail="Job is already paused") + + # Write the PAUSE command to the command file + command_file_path = os.path.join(job_path, "command") + with open(command_file_path, "w") as command_file: + command_file.write("PAUSE\n") + + return {"message": f"Job '{selected_job_name}' has been paused"} + except Exception as e: + error_message = f"Unexpected error pausing job '{selected_job_name}': {str(e)}" + logger.error(error_message) + raise HTTPException(status_code=500, detail=error_message) + + +@app.get("/get-log/{job_name}") +async def get_log(job_name: str): + if not job_name: + raise HTTPException(status_code=400, detail="No job specified") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + log_file_path = os.path.join(job_path, "run.log") + + if not os.path.exists(log_file_path): + logger.info(f"Log file not found at: {log_file_path}") + # Return empty content instead of raising 404 + return {"content": ""} + + # Read the entire log file content + with open(log_file_path, "r") as log_file: + content = log_file.read() + + return {"content": content} + + +# SSE endpoint to stream job output +@app.get("/stream-output/{job_name}") +async def stream_output(job_name: str, background_tasks: BackgroundTasks): + """ + Stream the output of the specified job using Server-Sent Events (SSE). 
+ """ + if not job_name: + raise HTTPException(status_code=400, detail="No job specified") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + log_file_path = os.path.join(job_path, "run.log") + + # Wait for the log file to be created (retry mechanism) + max_retries = 30 # Maximum number of retries + retry_interval = 1 # Time in seconds between retries + retry_count = 0 + + while not os.path.exists(log_file_path) and retry_count < max_retries: + logger.info(f"Waiting for log file to be created at: {log_file_path}") + await asyncio.sleep(retry_interval) + retry_count += 1 + + # If the log file is still not found, raise a 404 error + if not os.path.exists(log_file_path): + logger.error(f"Log file not found at: {log_file_path}") + raise HTTPException( + status_code=404, detail=f"Log file not found at: {log_file_path}" + ) + + # Function to read the log file line by line + async def log_file_reader(): + with open(log_file_path, "r") as log_file: + log_file.seek(0, os.SEEK_END) # Start at the end of the file + while True: + line = log_file.readline() + if line: + yield f"data: {line.strip()}\n\n" + else: + await asyncio.sleep(1) # Wait for new data + + # Start streaming the log file to the client + return StreamingResponse(log_file_reader(), media_type="text/event-stream") +# Namelist Apis + + +@app.get("/jobs/{job_id}/namelists") +def get_namelists(job_id: str): + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_dir = os.path.join(ctoaster_jobs, job_id) + + if not os.path.isdir(job_dir): + raise HTTPException(status_code=404, detail="Job not found") + + # List files in job_dir that start with 'data_' and are files + namelists = [] + for filename in os.listdir(job_dir): + file_path = os.path.join(job_dir, filename) + if filename.startswith("data_") and os.path.isfile(file_path): + namelist_name = filename[len("data_") :] # Remove 'data_' prefix + namelists.append(namelist_name) + + return {"namelists": namelists} + + +@app.get("/jobs/{job_id}/namelists/{namelist_name}") +def get_namelist_content(job_id: str, namelist_name: str): + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_dir = os.path.join(ctoaster_jobs, job_id) + + if not os.path.isdir(job_dir): + raise HTTPException(status_code=404, detail="Job not found") + + # Sanitize namelist_name to prevent directory traversal + safe_namelist_name = os.path.basename(namelist_name) + + # Construct the filename by adding 'data_' prefix + namelist_filename = f"data_{safe_namelist_name}" + namelist_file_path = os.path.join(job_dir, namelist_filename) + + if not os.path.isfile(namelist_file_path): + raise HTTPException(status_code=404, detail="Namelist not found") + + try: + with open(namelist_file_path, "r") as file: + content = file.read() + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error reading namelist file: {str(e)}" + ) + + return {"namelist_name": safe_namelist_name, "content": content} + + +@app.get("/get_data_files_list/{job_name}") +async def get_data_files_list(job_name: str): + if not job_name: + raise HTTPException(status_code=400, detail="No job specified") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + # Search for 'output/biogem' folder specifically + 
plot_data_path = None + for root, dirs, files in os.walk(job_path): + if "output/biogem" in root: + plot_data_path = root + break + + if not plot_data_path: + raise HTTPException( + status_code=404, detail="Output/biogem path not found" + ) + + print(":: Resolved plot_data_path ::", plot_data_path) + + try: + # Log all files in the directory for debugging purposes + all_files = os.listdir(plot_data_path) + print(":: All files in resolved path ::", all_files) + + # Match files starting with 'biogem_series' + data_file_name_prefix = "biogem_series" + data_file_list = [f for f in all_files if f.startswith(data_file_name_prefix)] + + print(":: List of matching files ::", data_file_list) + + if not data_file_list: + raise HTTPException( + status_code=404, + detail=f"No files found with prefix '{data_file_name_prefix}' in {plot_data_path}", + ) + + return data_file_list + + except Exception as e: + raise HTTPException( + status_code=500, detail=f"Error fetching data files: {str(e)}" + ) + +@app.get("/get-variables/{job_name}/{data_file_name}") +async def get_variables(job_name: str, data_file_name: str): + if not job_name or not data_file_name: + raise HTTPException(status_code=400, detail="Job name or data file name is missing") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + # Construct the job path + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + # Search for 'output/biogem' folder specifically + plot_data_path = None + for root, dirs, files in os.walk(job_path): + if "output/biogem" in root: + plot_data_path = root + break + + if not plot_data_path: + raise HTTPException( + status_code=404, detail="Output/biogem path not found" + ) + + # Construct the full path to the data file + data_file_path = os.path.join(plot_data_path, data_file_name) + + # Check if the data file exists + if not os.path.isfile(data_file_path): + raise HTTPException(status_code=404, detail="Data file not found") + + # Read the file and extract variables from the header line + try: + with open(data_file_path, 'r') as file: + header_line = file.readline().strip() + # Extract variables based on everything after '/' in the header + variables = [var.strip() for var in header_line.split('/')[1:]] # Skip the first column + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error reading the data file: {str(e)}") + + if not variables: + raise HTTPException(status_code=404, detail="No variables found in the data file") + + # Return the list of variables directly + return variables + + +from pydantic import BaseModel + +# Request body model for the POST API +class PlotDataRequest(BaseModel): + job_name: str + data_file_name: str + variable: str + +@app.post("/get-plot-data") +async def get_plot_data(request: PlotDataRequest): + job_name = request.job_name + data_file_name = request.data_file_name + variable = request.variable + + if not job_name or not data_file_name or not variable: + raise HTTPException(status_code=400, detail="Job name, data file, or variable is missing") + + if ctoaster_jobs is None: + raise ValueError("ctoaster_jobs is not defined") + + # Construct the job path + job_path = os.path.join(ctoaster_jobs, job_name) + + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + # Search for 'output/biogem' folder specifically + plot_data_path = None + for root, dirs, files in os.walk(job_path): + if "output/biogem" 
in root: + plot_data_path = root + break + + if not plot_data_path: + raise HTTPException( + status_code=404, detail="Output/biogem path not found" + ) + + # Construct the full path to the data file + data_file_path = os.path.join(plot_data_path, data_file_name) + + # Check if the data file exists + if not os.path.isfile(data_file_path): + raise HTTPException(status_code=404, detail="Data file not found") + + # Read the file and extract data for the selected variable + try: + with open(data_file_path, 'r') as file: + header_line = file.readline().strip() + columns = header_line.split('/') # Split by '/' to match the column names + columns = [col.strip() for col in columns] + + # Get the first column name and the index of the selected variable + first_column_name = columns[0] # Use the original first column name + if variable not in columns: + raise HTTPException(status_code=404, detail="Variable not found in the data file") + + variable_index = columns.index(variable) + + # Read the data lines and extract the values for the first column and selected variable + data = [] + for line in file: + parts = line.strip().split() + if len(parts) > variable_index: + try: + first_column_value = float(parts[0]) # Value of the first column + data_value = float(parts[variable_index]) + data.append([first_column_value, data_value]) # Store as [first_column_value, data_value] pair + except ValueError: + continue # Skip lines with invalid data + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error reading the data file: {str(e)}") + + if not data: + raise HTTPException(status_code=404, detail="No data found for the selected variable") + + # Return the original column names and data + return { + "columns": [first_column_name, variable], + "data": data + } + +from typing import Generator +from fastapi.responses import StreamingResponse + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +def trim_variable(variable: str) -> str: + """Trim and normalize the variable by removing leading/trailing spaces.""" + return variable.strip() + +async def read_data_file(file_path: str, variable: str) -> Generator[str, None, None]: + """Generator function to yield existing and new data as it is written to the file.""" + try: + with open(file_path, 'r') as file: + # Read header and split by '/' + header_line = file.readline().strip() + columns = header_line.split('/') # Adjust delimiter if needed + trimmed_columns = [trim_variable(col) for col in columns] + trimmed_variable = trim_variable(variable) + + if trimmed_variable not in trimmed_columns: + logger.error(f"Variable '{trimmed_variable}' not found in columns: {trimmed_columns}") + raise HTTPException(status_code=404, detail=f"Variable '{variable}' not found in the file") + + variable_index = trimmed_columns.index(trimmed_variable) + + # Step 1: Stream all existing data in the file + while True: + line = file.readline() + if not line: + break # Stop when we reach the end of existing data + + parts = line.strip().split('/') + if len(parts) > variable_index: + try: + first_column_value = float(parts[0].strip()) + data_value = float(parts[variable_index].strip()) + yield f"data: {first_column_value},{data_value}\n\n" + except ValueError: + continue # Skip lines with invalid data + + # Step 2: Tail the file for new data + file.seek(0, os.SEEK_END) # Move to the end of the file to start tailing + while True: + line = file.readline() + if not line: + await asyncio.sleep(0.5) # Sleep briefly to wait 
for new data + continue + + parts = line.strip().split('/') + if len(parts) > variable_index: + try: + first_column_value = float(parts[0].strip()) + data_value = float(parts[variable_index].strip()) + yield f"data: {first_column_value},{data_value}\n\n" + except ValueError: + continue # Skip lines with invalid data + except HTTPException as http_exc: + raise http_exc + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error reading the data file: {str(e)}") + +@app.get("/get-plot-data-stream") +async def get_plot_data_stream( + job_name: str = Query(...), + data_file_name: str = Query(...), + variable: str = Query(...) +): + """GET API to stream data for plotting in real-time.""" + job_path = os.path.join(ctoaster_jobs, job_name) + if not os.path.isdir(job_path): + raise HTTPException(status_code=404, detail="Job not found") + + plot_data_path = None + for root, dirs, files in os.walk(job_path): + if "output/biogem" in root: + plot_data_path = root + break + + if not plot_data_path: + raise HTTPException(status_code=404, detail="Output/biogem path not found") + + data_file_path = os.path.join(plot_data_path, data_file_name) + if not os.path.isfile(data_file_path): + raise HTTPException(status_code=404, detail="Data file not found") + + # Return streaming response for real-time data + return StreamingResponse(read_data_file(data_file_path, variable), media_type="text/event-stream") + diff --git a/tools/config_utils.py b/tools/config_utils.py index f85e3f0f..903783c8 100755 --- a/tools/config_utils.py +++ b/tools/config_utils.py @@ -1,40 +1,52 @@ -import json, csv -import os, sys, shutil, glob +import csv +import glob +import json +import os import re +import shutil +import sys import utils as U # Regex for matching floating point values. -fp_re = '[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?' +fp_re = r"[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?" + # Read and parse a cTOASTER configuration file. def read_config(f, msg): # Clean string quotes and comments from parameter value. def clean(s): - if (s[0] == '"'): return s[1:].partition('"')[0] - elif (s[0] == "'"): return s[1:].partition("'")[0] - else: return s.partition('#')[0].strip() + if s[0] == '"': + return s[1:].partition('"')[0] + elif s[0] == "'": + return s[1:].partition("'")[0] + else: + return s.partition("#")[0].strip() + try: res = {} with open(f) as fp: for line in fp: - if re.match('^\s*#', line): continue - m = re.search('([a-zA-Z0-9_]+)=(.*)', line) - if m: res[m.group(1)] = clean(m.group(2).strip()) + if re.match(r"^\s*#", line): + continue + m = re.search("([a-zA-Z0-9_]+)=(.*)", line) + if m: + res[m.group(1)] = clean(m.group(2).strip()) return res except FileNotFoundError: - sys.exit(f'{msg} not found: {f}') # Modernized message using f-string + sys.exit(f"{msg} not found: {f}") # Modernized message using f-string except Exception as e: raise e # Merge module flags from base and user configurations. + def merge_flags(dicts): res = {} for d in dicts: for k, v in d.items(): - res[k] = 1 if v.lower() == '.true.' else 0 + res[k] = 1 if v.lower() == ".true." 
else 0 return res @@ -44,37 +56,50 @@ def merge_flags(dicts): srcdir = None datadir = None + def set_dirs(src, data): global srcdir, datadir srcdir = src datadir = data -module_info = { } -flagname_to_mod = { } + +module_info = {} +flagname_to_mod = {} + def load_module_info(): try: - with open(os.path.join(srcdir, 'module-info.csv')) as fp: + with open(os.path.join(U.ctoaster_root, "src/module-info.csv")) as fp: reader = csv.reader(fp, skipinitialspace=True) for row in reader: - if row[0].startswith('#'): + if row[0].startswith("#"): continue # Correctly skip comment lines - flag = f'ma_flag_{row[1]}' if row[1] != 'NONE' else row[1] - module_info[row[0]] = {'flag_name': flag, 'prefix': row[2], - 'nml_file': row[3], 'nml_name': row[4]} + flag = f"ma_flag_{row[1]}" if row[1] != "NONE" else row[1] + module_info[row[0]] = { + "flag_name": flag, + "prefix": row[2], + "nml_file": row[3], + "nml_name": row[4], + } flagname_to_mod[flag] = row[0] except Exception as e: # Catch and handle exceptions more specifically - error_msg = "Couldn't open module info file " + os.path.join(srcdir, 'module-info.csv') + error_msg = "Couldn't open module info file " + os.path.join( + srcdir, "module-info.csv" + ) if not srcdir: error_msg = "Internal error: source directory not set!" sys.exit(error_msg) + def module_from_flagname(flagname): - if not flagname_to_mod: load_module_info() + if not flagname_to_mod: + load_module_info() return flagname_to_mod[flagname] + def lookup_module(modname): - if not module_info: load_module_info() + if not module_info: + load_module_info() return module_info[modname] @@ -85,34 +110,35 @@ def extract_defines(maps): res = {} for m in maps: for k, v in m.items(): - if v.startswith('$(DEFINE)'): - ckv = v[9:].split('=') + if v.startswith("$(DEFINE)"): + ckv = v[9:].split("=") res[ckv[0]] = int(ckv[1]) return res def make_coordinates(defs): - res = { } + res = {} for k, v in defs.items(): - res['ma_dim_' + k] = v + res["ma_dim_" + k] = v return res def timestepping_options(runlen, coords, t100, quiet=False): - lons = coords['GOLDSTEINNLONS'] - lats = coords['GOLDSTEINNLATS'] - levs = coords['GOLDSTEINNLEVS'] + lons = coords["GOLDSTEINNLONS"] + lats = coords["GOLDSTEINNLATS"] + levs = coords["GOLDSTEINNLEVS"] # Define relative biogeochem time-stepping. dbiotbl = [ - # t96 t100 - # lons levs ntstp dbiostp ntstp dbiostp - [ 36, 16, 96, 2, 100, 2 ], - [ 36, 8, 96, 4, 100, 5 ], - [ 18, 16, 48, 1, 50, 2 ], - [ 18, 8, 48, 2, 50, 5 ], - [ 36, 32, 96, 1, 100, 1 ] ] + # t96 t100 + # lons levs ntstp dbiostp ntstp dbiostp + [36, 16, 96, 2, 100, 2], + [36, 8, 96, 4, 100, 5], + [18, 16, 48, 1, 50, 2], + [18, 8, 48, 2, 50, 5], + [36, 32, 96, 1, 100, 1], + ] # ANY-OTHER 96 1 100 1 nsteps = 100 if t100 else 96 dbio = 1 @@ -122,26 +148,38 @@ def timestepping_options(runlen, coords, t100, quiet=False): dbio = chk[5] if t100 else chk[3] if not quiet: if not quiet: - print(f"Setting time-stepping [GOLDSTEIN, BIOGEM:GOLDSTEIN]: {nsteps} {dbio}") + print( + f"Setting time-stepping [GOLDSTEIN, BIOGEM:GOLDSTEIN]: {nsteps} {dbio}" + ) # Define primary model time step. dstp = 3600.0 * 24.0 * 365.25 / 5.0 / nsteps - res = { } + res = {} # Primary model time step. - res['ma_genie_timestep'] = dstp + res["ma_genie_timestep"] = dstp # Relative time-stepping. 
- res.update({k: 5 for k in ['ma_ksic_loop', 'ma_kocn_loop', 'ma_klnd_loop']}) - res.update({k: dbio for k in ['ma_conv_kocn_katchem', 'ma_conv_kocn_kbiogem', 'ma_conv_kocn_krokgem']}) - res.update({k: nsteps for k in ['ma_conv_kocn_ksedgem', 'ma_kgemlite']}) - + res.update({k: 5 for k in ["ma_ksic_loop", "ma_kocn_loop", "ma_klnd_loop"]}) + res.update( + { + k: dbio + for k in [ + "ma_conv_kocn_katchem", + "ma_conv_kocn_kbiogem", + "ma_conv_kocn_krokgem", + ] + } + ) + res.update({k: nsteps for k in ["ma_conv_kocn_ksedgem", "ma_kgemlite"]}) # BIOGEM run length and SEDGEM sediment age. - for k in ['bg_par_misc_t_runtime', 'sg_par_misc_t_runtime']: res[k] = runlen + for k in ["bg_par_misc_t_runtime", "sg_par_misc_t_runtime"]: + res[k] = runlen # Overall cTOASTER run length. - for k in ['ma_koverall_total', 'ma_dt_write']: res[k] = runlen * 5 * nsteps + for k in ["ma_koverall_total", "ma_dt_write"]: + res[k] = runlen * 5 * nsteps # npstp: 'Health check' frequency (*) # iwstp: Climate model component restart frequency. @@ -150,28 +188,28 @@ def timestepping_options(runlen, coords, t100, quiet=False): # nyear: Climate components time-steps per year. # (*) A '+1' in effect disables this feature ###===> WHAT DOES "a '+1' in effect disables this feature" MEAN? - ps = ['ea', 'go', 'gs', 'ents'] + ps = ["ea", "go", "gs", "ents"] res.update({f"{p}_npstp": runlen * nsteps for p in ps}) res.update({f"{p}_iwstp": runlen * nsteps for p in ps}) res.update({f"{p}_itstp": runlen * nsteps + 1 for p in ps}) res.update({f"{p}_ianav": runlen * nsteps + 1 for p in ps}) - res.update({k: nsteps for k in ['ea_nyear', 'go_nyear', 'gs_nyear']}) + res.update({k: nsteps for k in ["ea_nyear", "go_nyear", "gs_nyear"]}) return res def restart_options(restart): - res = { } + res = {} # Set climate model re-start file details. # Set default flags. # Set NetCDF restart saving flag. - res.update({k: 'n' for k in ['ea_netout', 'go_netout', 'gs_netout']}) + res.update({k: "n" for k in ["ea_netout", "go_netout", "gs_netout"]}) # Set ASCII restart output flag. - res.update({k: 'y' for k in ['ea_ascout', 'go_ascout', 'gs_ascout']}) + res.update({k: "y" for k in ["ea_ascout", "go_ascout", "gs_ascout"]}) # Set ASCII restart number (i.e., output file string). - res.update({k: 'rst' for k in ['ea_lout', 'go_lout', 'gs_lout', 'ents_out_name']}) - res['ents_restart_file'] = 'rst.sland' + res.update({k: "rst" for k in ["ea_lout", "go_lout", "gs_lout", "ents_out_name"]}) + res["ents_restart_file"] = "rst.sland" # Configure use of restart. # ----------------------------- @@ -181,85 +219,93 @@ def restart_options(restart): # => set restart input number # => copy restart files to data directory if restart: - res.update({p + '_ans': 'c' for p in ['ea', 'go', 'gs', 'ents']}) - res.update({p + '_netin': 'n' for p in ['ea', 'go', 'gs', 'ents']}) - res.update({p + '_ctrl_continuing': '.TRUE.' 
for p in ['ac', 'bg', 'sg', 'rg']}) - res.update({k: 'rst.1' for k in ['ea_lin', 'go_lin', 'gs_lin']}) - res.update({ - 'ea_rstdir_name': 'restart/embm', - 'go_rstdir_name': 'restart/goldstein', - 'gs_rstdir_name': 'restart/goldsteinseaice', - 'ents_outdir_name': 'output/ents', - 'ents_dirnetout': 'restart/ents', - 'ents_rstdir_name': 'restart/ents', - 'ac_par_rstdir_name': 'restart/atchem', - 'bg_par_rstdir_name': 'restart/biogem', - 'sg_par_rstdir_name': 'restart/sedgem', - 'rg_par_rstdir_name': 'restart/rokgem', - }) + res.update({p + "_ans": "c" for p in ["ea", "go", "gs", "ents"]}) + res.update({p + "_netin": "n" for p in ["ea", "go", "gs", "ents"]}) + res.update({p + "_ctrl_continuing": ".TRUE." for p in ["ac", "bg", "sg", "rg"]}) + res.update({k: "rst.1" for k in ["ea_lin", "go_lin", "gs_lin"]}) + res.update( + { + "ea_rstdir_name": "restart/embm", + "go_rstdir_name": "restart/goldstein", + "gs_rstdir_name": "restart/goldsteinseaice", + "ents_outdir_name": "output/ents", + "ents_dirnetout": "restart/ents", + "ents_rstdir_name": "restart/ents", + "ac_par_rstdir_name": "restart/atchem", + "bg_par_rstdir_name": "restart/biogem", + "sg_par_rstdir_name": "restart/sedgem", + "rg_par_rstdir_name": "restart/rokgem", + } + ) else: - res.update({p + '_ans': 'n' for p in ['ea', 'go', 'gs', 'ents']}) - res.update({p + '_ctrl_continuing': '.FALSE.' for p in ['ac', 'bg', 'sg', 'rg']}) - + res.update({p + "_ans": "n" for p in ["ea", "go", "gs", "ents"]}) + res.update( + {p + "_ctrl_continuing": ".FALSE." for p in ["ac", "bg", "sg", "rg"]} + ) # Set NetCDF format biogeochem restart files. - res.update({p + '_ctrl_ncrst': '.TRUE.' for p in ['ac', 'bg', 'sg']}) + res.update({p + "_ctrl_ncrst": ".TRUE." for p in ["ac", "bg", "sg"]}) # Over-ride defaults. - res['bg_ctrl_force_oldformat'] = '.FALSE.' + res["bg_ctrl_force_oldformat"] = ".FALSE." return res def is_bool(x): - return str(x).lower() in ('.true.', '.false.') + return str(x).lower() in (".true.", ".false.") class Namelist: """Fortran namelists""" + def __init__(self, fp): - self.entries = { } - self.name = '' - mode = 'start' + self.entries = {} + self.name = "" + mode = "start" for line in fp: line = line.strip() - if mode == 'start': - if line.startswith('&'): + if mode == "start": + if line.startswith("&"): self.name = line[1:].strip() - mode = 'main' + mode = "main" else: - if line.startswith('&'): mode = 'done' + if line.startswith("&"): + mode = "done" else: - if line.endswith(','): line = line[:-1] - kv = line.split('=') - self.entries[kv[0]] = kv[1].strip('"\'') + if line.endswith(","): + line = line[:-1] + kv = line.split("=") + self.entries[kv[0]] = kv[1].strip("\"'") def formatValue(self, v): - if v.lower() == '.true.': - return '.TRUE.' - if v.lower() == '.false.': - return '.FALSE.' - if re.match('^' + fp_re + '$', v): + if v.lower() == ".true.": + return ".TRUE." + if v.lower() == ".false.": + return ".FALSE." + if re.match("^" + fp_re + "$", v): return v return f'"{v}"' def write(self, fp): - print(f'&{self.name}', file=fp) + print(f"&{self.name}", file=fp) for k in sorted(self.entries): v = self.entries[k] - print(f' {k}={self.formatValue(str(v))},', file=fp) - print('&END', file=fp) + print(f" {k}={self.formatValue(str(v))},", file=fp) + print("&END", file=fp) def merge(self, prefix, maps): """Merge configuration data into default namelist. 
Deals with - stripping model-dependent prefix and parameter arrays.""" + stripping model-dependent prefix and parameter arrays.""" plen = len(prefix) for m in maps: for k in m.keys(): - if k[0:plen] != prefix: continue - rk = k[plen+1:] - s = re.search('_(\d+)$', rk) - if s: rk = rk.rstrip('_0123456789') + '(' + s.group(1) + ')' - if (rk in self.entries): + if k[0:plen] != prefix: + continue + rk = k[plen + 1 :] + s = re.search(r"_(\d+)$", rk) + if s: + rk = rk.rstrip("_0123456789") + "(" + s.group(1) + ")" + if rk in self.entries: current = self.entries[rk] new = m[k] # Make sure that boolean values from default @@ -297,63 +343,81 @@ def merge(self, prefix, maps): # pick up some unused files, but we should get everything that we # need. + def copy_data_files(m, nml, outdir, extras): # Extract and filter parameter values. def check_data_item(s): - if not isinstance(s, str): return False - if s.lower() in ['.true.', '.false.', 'n', 'y', m]: return False - if re.match(fp_re, s): return False - if s == 'input/' + m: return False - for t in ['output/', 'restart/', '/']: - if s.startswith(t): return False + if not isinstance(s, str): + return False + if s.lower() in [".true.", ".false.", "n", "y", m]: + return False + if re.match(fp_re, s): + return False + if s == "input/" + m: + return False + for t in ["output/", "restart/", "/"]: + if s.startswith(t): + return False return True - cands = [os.path.basename(f) - for f in nml.entries.values() if check_data_item(f)] + + cands = [os.path.basename(f) for f in nml.entries.values() if check_data_item(f)] # Add per-module 'specials'. - if extras: cands += extras + if extras: + cands += extras # Look for exact file matches in module data directory. - checkdir = os.path.join(U.ctoaster_root, 'data', m) + checkdir = os.path.join(U.ctoaster_root, "data", m) + def exact(f): try: shutil.copy(os.path.join(checkdir, f), outdir) return True - except: return False + except: + return False + cands = [f for f in cands if not exact(f)] # Look for exact file matches in forcings directory. - checkdir = os.path.join(U.ctoaster_data, 'forcings') + checkdir = os.path.join(U.ctoaster_data, "forcings") + def forcing(f): try: shutil.copytree(os.path.join(checkdir, f), os.path.join(outdir, f)) return True - except: return False + except: + return False + cands = [f for f in cands if not forcing(f)] ###print(cands) ###for f in cands: print(f+ ' = ' + str(forcing(f))) ###for f in cands: print(forcing(f)) # Look for partial matches. - checkdir = os.path.join(U.ctoaster_root, 'data', m) + checkdir = os.path.join(U.ctoaster_root, "data", m) + def partial(f): ret = False try: - for match in glob.iglob(os.path.join(checkdir, '*' + f + '*')): + for match in glob.iglob(os.path.join(checkdir, "*" + f + "*")): shutil.copy(match, outdir) ret = True return ret - except: return ret + except: + return ret + cands = [f for f in cands if not partial(f)] # Copy restart files: if restarting from an old ctoaster job, assume # that the job is in ~/ctoaster_output. 
+ def copy_restart_files(m, nml, outdir, restart_path): indir = os.path.join(restart_path, m) - fs = glob.glob(os.path.join(indir, '*rst*')) - fs += glob.glob(os.path.join(indir, '*restart*')) - if os.path.exists(os.path.join(indir, 'sedcore.nc')): - fs += [os.path.join(indir, 'sedcore.nc')] - for f in fs: shutil.copy(f, outdir) + fs = glob.glob(os.path.join(indir, "*rst*")) + fs += glob.glob(os.path.join(indir, "*restart*")) + if os.path.exists(os.path.join(indir, "sedcore.nc")): + fs += [os.path.join(indir, "sedcore.nc")] + for f in fs: + shutil.copy(f, outdir) diff --git a/tools/ctoaster-gui.py b/tools/ctoaster-gui.py index ab8ca127..a1cd1346 100755 --- a/tools/ctoaster-gui.py +++ b/tools/ctoaster-gui.py @@ -294,7 +294,7 @@ def clear_job(self): def run_job(self): """Run a job (button press callback)""" - # Check for existence of cupcake-ship.exe executable and build + # Check for existence of carrotcake-ship.exe executable and build # if necessary. exe = os.path.join( U.ctoaster_jobs, @@ -302,9 +302,9 @@ def run_job(self): U.ctoaster_version, platform, "ship", - "cupcake.exe", + "carrotcake.exe", ) - runexe = os.path.join(self.job.jobdir, "cupcake-ship.exe") + runexe = os.path.join(self.job.jobdir, "carrotcake-ship.exe") if not os.path.exists(exe): d = BuildExecutableDialog(self, self.job.jobdir) if not d.result: diff --git a/tools/go.py b/tools/go.py index 943e06ee..f690856a 100755 --- a/tools/go.py +++ b/tools/go.py @@ -295,7 +295,7 @@ def console_manage(cmd, logfp, cont, *rest): model_config = U.ModelConfig(build_type) model_dir = model_config.directory() -exe_name = 'cupcake-' + build_type + '.exe' if build_type else 'cupcake.exe' +exe_name = 'carrotcake-' + build_type + '.exe' if build_type else 'carrotcake.exe' # Clean up output directories for this job and (optionally) build @@ -306,8 +306,8 @@ def clean(clean_model): message(f'{clean_msg}...') if clean_model: model_config.clean() # calls method 'clean' for class ModelConfig [utils.py] - for exe in glob.iglob('cupcake-*.exe'): - os.remove(exe) # finds and removes 'cupcake-*.exe' files + for exe in glob.iglob('carrotcake-*.exe'): + os.remove(exe) # finds and removes 'carrotcake-*.exe' files if os.path.exists('build.log'): os.remove('build.log') # removes 'build.log' if it exists if os.path.exists('run.log'): @@ -345,7 +345,7 @@ def build(cont): if not need_build: message('Build is up to date') - shutil.copy(os.path.join(model_dir, 'cupcake.exe'), os.path.join(os.curdir, exe_name)) + shutil.copy(os.path.join(model_dir, 'carrotcake.exe'), os.path.join(os.curdir, exe_name)) if cont: cont() return @@ -360,7 +360,7 @@ def build2(result, cont): if result == 0: line('') message('Build OK') - shutil.copy(os.path.join(model_dir, 'cupcake.exe'), + shutil.copy(os.path.join(model_dir, 'carrotcake.exe'), os.path.join(os.curdir, exe_name)) if cont: cont() else: diff --git a/tools/new-job.py b/tools/new-job.py index a5cf5135..f269324a 100755 --- a/tools/new-job.py +++ b/tools/new-job.py @@ -1,40 +1,56 @@ -import os, sys, errno, shutil, datetime import argparse +import datetime +import errno +import os +import shutil import subprocess as sp +import sys -import utils as U import config_utils as C +import utils as U # cTOASTER configuration if not U.read_ctoaster_config(): - sys.exit('cTOASTER not set up: run the setup-ctoaster script!') + sys.exit("cTOASTER not set up: run the setup-ctoaster script!") # Command line arguments. 
-parser = argparse.ArgumentParser(description='Configure cTOASTER jobs') -parser.add_argument('job_name', nargs='?', help='Job name') -parser.add_argument('run_length', nargs='?', type=int, help='Run length') -parser.add_argument('-O', '--overwrite', action='store_true', help='Overwrite existing job') -parser.add_argument('-b', '--base-config', help='Base configuration name') -parser.add_argument('-u', '--user-config', help='User configuration name') -parser.add_argument('-m', '--config-mods', help='Configuration mods filename') -parser.add_argument('-c', '--config', help='Full configuration name') -parser.add_argument('-r', '--restart', help='Restart name') -parser.add_argument('--old-restart', action='store_true', help='Restart from old ctoaster job') -parser.add_argument('--t100', action='store_true', help='Use "T100" timestepping') -parser.add_argument('-t', '--test-job', help='Set up from test') -parser.add_argument('-j', '--job-dir', help='Alternative job directory', default=U.ctoaster_jobs) -parser.add_argument('-v', '--model-version', help='Model version to use', default=U.ctoaster_version) -parser.add_argument('-g', '--gui', action='store_true', help=argparse.SUPPRESS) +parser = argparse.ArgumentParser(description="Configure cTOASTER jobs") +parser.add_argument("job_name", nargs="?", help="Job name") +parser.add_argument("run_length", nargs="?", type=int, help="Run length") +parser.add_argument( + "-O", "--overwrite", action="store_true", help="Overwrite existing job" +) +parser.add_argument("-b", "--base-config", help="Base configuration name") +parser.add_argument("-u", "--user-config", help="User configuration name") +parser.add_argument("-m", "--config-mods", help="Configuration mods filename") +parser.add_argument("-c", "--config", help="Full configuration name") +parser.add_argument("-r", "--restart", help="Restart name") +parser.add_argument( + "--old-restart", action="store_true", help="Restart from old ctoaster job" +) +parser.add_argument("--t100", action="store_true", help='Use "T100" timestepping') +parser.add_argument("-t", "--test-job", help="Set up from test") +parser.add_argument( + "-j", "--job-dir", help="Alternative job directory", default=U.ctoaster_jobs +) +parser.add_argument( + "-v", "--model-version", help="Model version to use", default=U.ctoaster_version +) +parser.add_argument("-g", "--gui", action="store_true", help=argparse.SUPPRESS) args = parser.parse_args() # Validate the number of positional arguments based on whether test_job is specified -if not args.test_job and not (args.job_name and args.run_length is not None) or \ - args.test_job and (args.job_name or args.run_length is not None): +if ( + not args.test_job + and not (args.job_name and args.run_length is not None) + or args.test_job + and (args.job_name or args.run_length is not None) +): parser.print_help() sys.exit() @@ -59,74 +75,84 @@ sys.exit(f'Model version "{model_version}" does not exist') - def error_exit(msg): if running_from_gui: - sys.exit(f'ERR:{msg}') + sys.exit(f"ERR:{msg}") else: sys.exit(msg) + # If a specific model version is requested, set up a repository clone # on the appropriate branch and run the configuration script at that # version. 
-repo_version = 'DEVELOPMENT' -if (os.path.exists('repo-version')): - with open('repo-version') as fp: +repo_version = "DEVELOPMENT" +if os.path.exists("repo-version"): + with open("repo-version") as fp: repo_version = fp.readline().strip() if model_version != repo_version: repodir = U.setup_version_repo(model_version) os.chdir(repodir) - os.execv(sys.executable, - [os.path.join(os.curdir, 'tools', 'new-job.py')] + sys.argv) + os.execv( + sys.executable, [os.path.join(os.curdir, "tools", "new-job.py")] + sys.argv + ) # Check configuration file options. base_and_user_config = base_config and user_config if not base_and_user_config and not full_config and not test_job: - error_exit('Either base and user, full configuration or test must be specified') + error_exit("Either base and user, full configuration or test must be specified") if not base_and_user_config and config_mods: - error_exit('Configuration mods can only be specified if using base and user configuration') + error_exit( + "Configuration mods can only be specified if using base and user configuration" + ) nset = 0 -if base_and_user_config: nset += 1 -if full_config: nset += 1 -if test_job: nset += 1 +if base_and_user_config: + nset += 1 +if full_config: + nset += 1 +if test_job: + nset += 1 if nset > 1: - error_exit('Only one of base and user, full configuration, or test may be specified') - + error_exit( + "Only one of base and user, full configuration, or test may be specified" + ) -# +# if test_job: test_dir = os.path.join(U.ctoaster_test, test_job) - with open(os.path.join(test_dir, 'test_info')) as fp: + with open(os.path.join(test_dir, "test_info")) as fp: for line in fp: - k, _, v = line.partition(':') + k, _, v = line.partition(":") k = k.strip() v = v.strip() - if k == 'restart_from': + if k == "restart_from": restart = v - elif k == 'run_length': + elif k == "run_length": run_length = int(v) - elif k == 't100': - t100 = v == 'True' - + elif k == "t100": + t100 = v == "True" # Check for existence of any restart job. if restart: if old_restart: - restart_path = os.path.join(os.path.expanduser('~/ctoaster_output'), restart) + restart_path = os.path.join(os.path.expanduser("~/ctoaster_output"), restart) elif os.path.exists(restart): restart_path = restart else: - restart_path = os.path.join(job_dir_base, restart, 'output') + restart_path = os.path.join(job_dir_base, restart, "output") if not os.path.exists(restart_path): - error_msg = f'Old ctoaster restart job "{restart}" does not exist' if old_restart else f'Restart job "{restart}" does not exist' + error_msg = ( + f'Old ctoaster restart job "{restart}" does not exist' + if old_restart + else f'Restart job "{restart}" does not exist' + ) error_exit(error_msg) # All set up. Off we go... 
@@ -134,80 +160,87 @@ def error_exit(msg): if not running_from_gui: print(f' Job name: {job_name} {" [TEST]" if test_job else ""}') if base_and_user_config: - print(f'Base config: {base_config}') - print(f'User config: {user_config}') - if config_mods: - print(f'Config mods: {config_mods}') - if full_config: - print(f'Full config: {full_config}') - if not test_job: - print(f' Run length: {run_length}') - print(f' Overwrite: {overwrite}') - print(f' Model: {model_version}') - + print(f"Base config: {base_config}") + print(f"User config: {user_config}") + if config_mods: + print(f"Config mods: {config_mods}") + if full_config: + print(f"Full config: {full_config}") + if not test_job: + print(f" Run length: {run_length}") + print(f" Overwrite: {overwrite}") + print(f" Model: {model_version}") # Read and parse configuration files. -if (base_and_user_config): +if base_and_user_config: if not os.path.exists(base_config): - base_config_dir = os.path.join(U.ctoaster_data, 'base-configs') - base_config_path = os.path.join(base_config_dir, - base_config + '.config') + base_config_dir = os.path.join(U.ctoaster_data, "base-configs") + base_config_path = os.path.join(base_config_dir, base_config + ".config") else: base_config_dir = os.getcwd() base_config_path = base_config - base = C.read_config(base_config_path, 'Base configuration') + base = C.read_config(base_config_path, "Base configuration") if not os.path.exists(user_config): - user_config_dir = os.path.join(U.ctoaster_data, 'user-configs') + user_config_dir = os.path.join(U.ctoaster_data, "user-configs") user_config_path = os.path.join(user_config_dir, user_config) else: user_config_dir = os.getcwd() user_config_path = user_config - user = C.read_config(user_config_path, 'User configuration') + user = C.read_config(user_config_path, "User configuration") configs = [base, user] if config_mods: - mods = C.read_config(config_mods, 'Configuration modifications') + mods = C.read_config(config_mods, "Configuration modifications") configs.append(mods) elif full_config: if not os.path.exists(full_config): - full_config_dir = os.path.join(U.ctoaster_data, 'full-configs') - full_config_path = os.path.join(full_config_dir, - full_config + '.config') + full_config_dir = os.path.join(U.ctoaster_data, "full-configs") + full_config_path = os.path.join(full_config_dir, full_config + ".config") else: full_config_dir = os.getcwd() full_config_path = full_config - full = C.read_config(full_config_path, 'Full configuration') + full = C.read_config(full_config_path, "Full configuration") configs = [full] else: # Test job -- read base_config, user_config and full_config files # as they exist. - if os.path.exists(os.path.join(test_dir, 'full_config')): - full = C.read_config(os.path.join(test_dir, 'full_config'), - 'Full configuration') + if os.path.exists(os.path.join(test_dir, "full_config")): + full = C.read_config( + os.path.join(test_dir, "full_config"), "Full configuration" + ) configs = [full] else: - base = C.read_config(os.path.join(test_dir, 'base_config'), - 'Base configuration') - user = C.read_config(os.path.join(test_dir, 'user_config'), - 'User configuration') + base = C.read_config( + os.path.join(test_dir, "base_config"), "Base configuration" + ) + user = C.read_config( + os.path.join(test_dir, "user_config"), "User configuration" + ) configs = [base, user] # Set up source and per-module input data directories. 
-srcdir = 'src' -datadir = 'data' +srcdir = os.path.join(U.ctoaster_root, "src") +datadir = "data" C.set_dirs(srcdir, datadir) # Determine modules used in job. + def extract_mod_opts(c): - return [x for x in c.keys() if x.startswith('ma_flag_')] + return [x for x in c.keys() if x.startswith("ma_flag_")] + + mod_opts = map(extract_mod_opts, configs) + + def extract_mod_flags(c, os): - return { k: c[k] for k in os } + return {k: c[k] for k in os} + + mod_flags = map(extract_mod_flags, configs, mod_opts) merged_mod_flags = C.merge_flags(mod_flags) mod_flags = [k for k in merged_mod_flags.keys() if merged_mod_flags[k]] @@ -216,91 +249,107 @@ def extract_mod_flags(c, os): # Set up job directory and per-module sub-directories. + def safe_mkdir(p): - os.makedirs(p, exist_ok=True) # Ensures directory is created without raising an error if it already exists + os.makedirs( + p, exist_ok=True + ) # Ensures directory is created without raising an error if it already exists + job_dir = os.path.join(job_dir_base, job_name) if not running_from_gui: - if overwrite: shutil.rmtree(job_dir, ignore_errors=True) - try: safe_mkdir(job_dir) - except OSError as e: error_exit("Can't create job directory: " + job_dir) + if overwrite: + shutil.rmtree(job_dir, ignore_errors=True) + try: + safe_mkdir(job_dir) + except OSError as e: + error_exit("Can't create job directory: " + job_dir) try: for m in modules: - safe_mkdir(os.path.join(job_dir, 'input', m)) - safe_mkdir(os.path.join(job_dir, 'output', m)) - if restart: safe_mkdir(os.path.join(job_dir, 'restart', m)) - safe_mkdir(os.path.join(job_dir, 'input', 'main')) - safe_mkdir(os.path.join(job_dir, 'output', 'main')) - if restart: safe_mkdir(os.path.join(job_dir, 'restart', 'main')) + safe_mkdir(os.path.join(job_dir, "input", m)) + safe_mkdir(os.path.join(job_dir, "output", m)) + if restart: + safe_mkdir(os.path.join(job_dir, "restart", m)) + safe_mkdir(os.path.join(job_dir, "input", "main")) + safe_mkdir(os.path.join(job_dir, "output", "main")) + if restart: + safe_mkdir(os.path.join(job_dir, "restart", "main")) except Exception as e: - with open('/dev/tty', 'w') as fp: + with open("/dev/tty", "w") as fp: print(e, file=fp) # Write configuration information to job directory. 
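To make the per-module skeleton created above concrete, here is a rough sketch, not part of the patch; the job path and module list are invented, and the real values come from the parsed configuration:

    import os

    job_dir = os.path.expanduser("~/ctoaster-jobs/example-job")   # hypothetical job directory
    modules = ["embm", "goldstein"]                                # hypothetical module list
    restarting = False                                             # restart/<module> only exists for restart jobs

    for m in modules + ["main"]:
        os.makedirs(os.path.join(job_dir, "input", m), exist_ok=True)
        os.makedirs(os.path.join(job_dir, "output", m), exist_ok=True)
        if restarting:
            os.makedirs(os.path.join(job_dir, "restart", m), exist_ok=True)
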
-cfg_dir = os.path.join(job_dir, 'config') +cfg_dir = os.path.join(job_dir, "config") if not running_from_gui: # Check if cfg_dir exists and overwrite flag is True, then remove it if os.path.exists(cfg_dir) and overwrite: shutil.rmtree(cfg_dir) # Now, safely create the cfg_dir as it's either new or has been cleared - os.makedirs(cfg_dir, exist_ok=True) # Use exist_ok to avoid error if the directory was just deleted and recreated + os.makedirs( + cfg_dir, exist_ok=True + ) # Use exist_ok to avoid error if the directory was just deleted and recreated if not test_job: - with open(os.path.join(cfg_dir, 'config'), 'w') as fp: + with open(os.path.join(cfg_dir, "config"), "w") as fp: if base_config: - print(f'base_config_dir: {base_config_dir}', file=fp) - print(f'base_config: {base_config}', file=fp) + print(f"base_config_dir: {base_config_dir}", file=fp) + print(f"base_config: {base_config}", file=fp) if user_config: - print(f'user_config_dir: {user_config_dir}', file=fp) - print(f'user_config: {user_config}', file=fp) + print(f"user_config_dir: {user_config_dir}", file=fp) + print(f"user_config: {user_config}", file=fp) if full_config: - print(f'full_config_dir: {full_config_dir}', file=fp) - print(f'full_config: {full_config}', file=fp) + print(f"full_config_dir: {full_config_dir}", file=fp) + print(f"full_config: {full_config}", file=fp) if config_mods: - print(f'config_mods: {config_mods}', file=fp) - print(f'config_date: {datetime.datetime.today()}', file=fp) - print(f'run_length: {run_length}', file=fp) - print(f't100: {t100}', file=fp) - if restart: - print(f'restart: {restart}', file=fp) + print(f"config_mods: {config_mods}", file=fp) + print(f"config_date: {datetime.datetime.today()}", file=fp) + print(f"run_length: {run_length}", file=fp) + print(f"t100: {t100}", file=fp) + if restart: + print(f"restart: {restart}", file=fp) if test_job: - shutil.copyfile(os.path.join(test_dir, 'test_info'), - os.path.join(cfg_dir, 'config')) - if os.path.exists(os.path.join(test_dir, 'base_config')): - shutil.copyfile(os.path.join(test_dir, 'base_config'), - os.path.join(cfg_dir, 'base_config')) - if os.path.exists(os.path.join(test_dir, 'user_config')): - shutil.copyfile(os.path.join(test_dir, 'user_config'), - os.path.join(cfg_dir, 'user_config')) - if os.path.exists(os.path.join(test_dir, 'full_config')): - shutil.copyfile(os.path.join(test_dir, 'full_config'), - os.path.join(cfg_dir, 'full_config')) + shutil.copyfile( + os.path.join(test_dir, "test_info"), os.path.join(cfg_dir, "config") + ) + if os.path.exists(os.path.join(test_dir, "base_config")): + shutil.copyfile( + os.path.join(test_dir, "base_config"), os.path.join(cfg_dir, "base_config") + ) + if os.path.exists(os.path.join(test_dir, "user_config")): + shutil.copyfile( + os.path.join(test_dir, "user_config"), os.path.join(cfg_dir, "user_config") + ) + if os.path.exists(os.path.join(test_dir, "full_config")): + shutil.copyfile( + os.path.join(test_dir, "full_config"), os.path.join(cfg_dir, "full_config") + ) else: if base_config: - shutil.copyfile(base_config_path, os.path.join(cfg_dir, 'base_config')) + shutil.copyfile(base_config_path, os.path.join(cfg_dir, "base_config")) if user_config: - shutil.copyfile(user_config_path, os.path.join(cfg_dir, 'user_config')) + shutil.copyfile(user_config_path, os.path.join(cfg_dir, "user_config")) if full_config: - shutil.copyfile(full_config_path, os.path.join(cfg_dir, 'full_config')) + shutil.copyfile(full_config_path, os.path.join(cfg_dir, "full_config")) if config_mods and not running_from_gui: - 
shutil.copyfile(config_mods, os.path.join(cfg_dir, 'config_mods')) + shutil.copyfile(config_mods, os.path.join(cfg_dir, "config_mods")) # Extract coordinate definitions from configuration. defines = C.extract_defines(configs) maxdeflen = max(map(len, defines.keys())) -deflines = [("'" + d + "':").ljust(maxdeflen + 4) + str(defines[d]) - for d in defines.keys()] -deflines[0] = 'coordvars = { ' + deflines[0] +deflines = [ + ("'" + d + "':").ljust(maxdeflen + 4) + str(defines[d]) for d in defines.keys() +] +deflines[0] = "coordvars = { " + deflines[0] for i in range(1, len(deflines)): - deflines[i] = ' ' + deflines[i] -for i in range(len(deflines)-1): - deflines[i] += ',' -deflines[-1] += ' }' + deflines[i] = " " + deflines[i] +for i in range(len(deflines) - 1): + deflines[i] += "," +deflines[-1] += " }" # Set up timestepping and restart options: this is only done if we @@ -308,79 +357,91 @@ def safe_mkdir(p): # configurations already include timestepping options. if len(configs) > 1: - tsopts = C.timestepping_options(run_length, defines, t100=t100, - quiet=running_from_gui) + tsopts = C.timestepping_options( + run_length, defines, t100=t100, quiet=running_from_gui + ) rstopts = C.restart_options(restart) configs = [configs[0], tsopts, rstopts] + configs[1:] # Create model version file for build. -with open(os.path.join(cfg_dir, 'model-version'), 'w') as fp: - if model_version == 'DEVELOPMENT': +with open(os.path.join(cfg_dir, "model-version"), "w") as fp: + if model_version == "DEVELOPMENT": try: - result = sp.run(['git', 'describe', '--tags', 'HEAD'], capture_output=True, text=True, check=True) + result = sp.run( + ["git", "describe", "--tags", "HEAD"], + capture_output=True, + text=True, + check=True, + ) rev = result.stdout.strip() - print(f'DEVELOPMENT:{rev}', file=fp) + print(f"DEVELOPMENT:{rev}", file=fp) except: - print('DEVELOPMENT:UNKNOWN', file=fp) + print("DEVELOPMENT:UNKNOWN", file=fp) else: print(model_version, file=fp) # Create "go" script for job. -shutil.copy(os.path.join(U.ctoaster_root, 'tools', 'go'), job_dir) -shutil.copy(os.path.join(U.ctoaster_root, 'tools', 'go.bat'), job_dir) +shutil.copy(os.path.join(U.ctoaster_root, "tools", "go"), job_dir) +shutil.copy(os.path.join(U.ctoaster_root, "tools", "go.bat"), job_dir) # Set up per-module extra data files (these are files that don't # appear in any configuration information...). -extra_data_files = { } -if 'embm' in modules and 'ents' in modules: - extra_data_files['embm'] = ['inv_linterp_matrix.dat', - 'NCEP_airt_monthly.dat', - 'NCEP_pptn_monthly.dat', - 'NCEP_RH_monthly.dat', - 'atm_albedo_monthly.dat', - 'uvic_windx.silo', - 'uvic_windy.silo', - 'monthly_windspd.silo'] -if 'ents' in modules: - extra_data_files['ents'] = ['ents_config.par', 'sealevel_config.par'] - -if 'sedgem' in modules: - extra_data_files['sedgem'] = ['lookup_calcite_4.dat', 'lookup_opal_5.dat'] +extra_data_files = {} +if "embm" in modules and "ents" in modules: + extra_data_files["embm"] = [ + "inv_linterp_matrix.dat", + "NCEP_airt_monthly.dat", + "NCEP_pptn_monthly.dat", + "NCEP_RH_monthly.dat", + "atm_albedo_monthly.dat", + "uvic_windx.silo", + "uvic_windy.silo", + "monthly_windspd.silo", + ] +if "ents" in modules: + extra_data_files["ents"] = ["ents_config.par", "sealevel_config.par"] + +if "sedgem" in modules: + extra_data_files["sedgem"] = ["lookup_calcite_4.dat", "lookup_opal_5.dat"] # Construct namelists and copy data files. 
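For context on the deflines construction above: it renders the coordinate defines as a literal Python dictionary assignment, with values aligned on the longest key. With invented define names and values the generated text would come out roughly as:

    coordvars = { 'NLONS': 36,
                  'NLATS': 36,
                  'NLEVS': 16 }
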
configs.append(C.make_coordinates(defines)) -for m in modules + ['main', 'gem']: +for m in modules + ["main", "gem"]: minfo = C.lookup_module(m) - if minfo['flag_name'] == 'NONE': - nmlin = os.path.join(srcdir, m + '-defaults.nml') + if minfo["flag_name"] == "NONE": + nmlin = os.path.join(srcdir, m + "-defaults.nml") else: - nmlin = os.path.join(srcdir, m, m + '-defaults.nml') - nmlout = os.path.join(job_dir, 'data_' + minfo['nml_file']) + nmlin = os.path.join(srcdir, m, m + "-defaults.nml") + nmlout = os.path.join(job_dir, "data_" + minfo["nml_file"]) with open(nmlin) as fp: nml = C.Namelist(fp) - nml.merge(minfo['prefix'], configs) - with open(nmlout, 'w') as ofp: nml.write(ofp) - C.copy_data_files(m, nml, os.path.join(job_dir, 'input', m), - extra_data_files.get(m)) + nml.merge(minfo["prefix"], configs) + with open(nmlout, "w") as ofp: + nml.write(ofp) + C.copy_data_files( + m, nml, os.path.join(job_dir, "input", m), extra_data_files.get(m) + ) if restart: - C.copy_restart_files(m, nml, os.path.join(job_dir, 'restart', m), - restart_path) + C.copy_restart_files( + m, nml, os.path.join(job_dir, "restart", m), restart_path + ) # Extra data files for main program. -jobmaindatadir = os.path.join(job_dir, 'input', 'main') -srcmaindatadir = os.path.join(U.ctoaster_root, 'data', 'main') -for s in ['atm', 'ocn', 'sed']: - shutil.copy(os.path.join(srcmaindatadir, 'tracer_define.' + s), - jobmaindatadir) +jobmaindatadir = os.path.join(job_dir, "input", "main") +srcmaindatadir = os.path.join(U.ctoaster_root, "data", "main") +for s in ["atm", "ocn", "sed"]: + shutil.copy(os.path.join(srcmaindatadir, "tracer_define." + s), jobmaindatadir) + +if running_from_gui: + print("OK") -if running_from_gui: print('OK') \ No newline at end of file diff --git a/tools/run-cupcake.py b/tools/run-carrotcake.py similarity index 92% rename from tools/run-cupcake.py rename to tools/run-carrotcake.py index f2fca81d..78c82086 100755 --- a/tools/run-cupcake.py +++ b/tools/run-carrotcake.py @@ -8,7 +8,7 @@ sys.exit("cTOASTER not set up: run the setup.py script!") # Setup command line arguments using argparse -parser = argparse.ArgumentParser(description='Run a cupcake job with the specified configuration.') +parser = argparse.ArgumentParser(description='Run a carrotcake job with the specified configuration.') parser.add_argument('base_config', help='Base configuration name') parser.add_argument('config_dir', help='Directory containing the configuration') parser.add_argument('run_id', help='Unique identifier for the run') diff --git a/tools/setup-ctoaster.py b/tools/setup-ctoaster.py index b9200440..458c4bd3 100755 --- a/tools/setup-ctoaster.py +++ b/tools/setup-ctoaster.py @@ -37,11 +37,11 @@ def yesno(prompt, default): if config: print('Already set up...') else: - root = ask('Root directory', os.path.expanduser('~/ctoaster.cupcake')) + root = ask('Root directory', os.path.expanduser('~/ctoaster.carrotcake')) base = os.path.abspath(os.path.join(root, os.pardir)) - data = ask('Data directory', os.path.join(base, 'ctoaster.cupcake-data')) - test = ask('Test directory', os.path.join(base, 'ctoaster.cupcake-test')) - jobs = ask('Jobs directory', os.path.join(base, 'ctoaster.cupcake-jobs')) + data = ask('Data directory', os.path.join(base, 'ctoaster.carrotcake-data')) + test = ask('Test directory', os.path.join(base, 'ctoaster.carrotcake-test')) + jobs = ask('Jobs directory', os.path.join(base, 'ctoaster.carrotcake-jobs')) vers = ask('Default version (RETURN to accept / type "help" to see options)', default_version, 
str(versions)) with open(U.ctoaster_cfgfile, 'w') as fp: print(f'ctoaster_root: {root}', file=fp) diff --git a/tools/tests.py b/tools/tests.py index d19926a0..538699c0 100755 --- a/tools/tests.py +++ b/tools/tests.py @@ -149,7 +149,6 @@ def ensure_nccompare(): if result.returncode != 0: sys.exit('Could not build nccompare.exe program') - # Compare NetCDF files. def compare_nc(f1, f2, logfp): cmd = [nccompare, '-v', '-a', str(abstol), '-r', str(reltol), f1, f2] diff --git a/tools/travis-setup b/tools/travis-setup index 6c4e2a53..4004d957 100755 --- a/tools/travis-setup +++ b/tools/travis-setup @@ -1,7 +1,7 @@ #!/bin/bash root=`pwd` echo "ctoaster_root: ${root}" > ~/.ctoasterrc -echo "ctoaster_data: ${root/ctoaster.cupcake/ctoaster-data}" >> ~/.ctoasterrc -echo "ctoaster_test: ${root/ctoaster.cupcake/ctoaster-test}" >> ~/.ctoasterrc -echo "ctoaster_jobs: ${root/ctoaster.cupcake/ctoaster-jobs}" >> ~/.ctoasterrc +echo "ctoaster_data: ${root/ctoaster.carrotcake/ctoaster-data}" >> ~/.ctoasterrc +echo "ctoaster_test: ${root/ctoaster.carrotcake/ctoaster-test}" >> ~/.ctoasterrc +echo "ctoaster_jobs: ${root/ctoaster.carrotcake/ctoaster-jobs}" >> ~/.ctoasterrc echo "ctoaster_version: DEVELOPMENT" >> ~/.ctoasterrc diff --git a/tools/utils.py b/tools/utils.py index c6c013c7..6e9cd4b0 100755 --- a/tools/utils.py +++ b/tools/utils.py @@ -1,72 +1,100 @@ from __future__ import print_function -import errno, os, sys, shutil, platform, string -import re, hashlib, glob -import subprocess as sp -# NOTE: The platform module in Python includes tools to see the platform's hardware, operating system , and interpreter version information where the program is running. +import errno +import glob +import hashlib +import os +import platform +import re +import shutil +import string +import subprocess as sp +import sys +# Global variables +ctoaster_root = None +ctoaster_data = None +ctoaster_test = None +ctoaster_jobs = None +ctoaster_version = None -# Read ctoaster configuration. +# Configuration file path +ctoaster_cfgfile = os.path.expanduser(os.path.join("~", ".ctoasterrc")) -ctoaster_cfgfile = os.path.expanduser(os.path.join('~', '.ctoasterrc')) def read_ctoaster_config(): global ctoaster_root, ctoaster_data, ctoaster_test, ctoaster_jobs, ctoaster_version try: with open(ctoaster_cfgfile) as fp: for line in fp: - fs = line.strip().split(':') + fs = line.strip().split(":") k = fs[0] - v = ':'.join(fs[1:]).strip() - if k == 'ctoaster_root': ctoaster_root = v - elif k == 'ctoaster_data': ctoaster_data = v - elif k == 'ctoaster_test': ctoaster_test = v - elif k == 'ctoaster_jobs': ctoaster_jobs = v - elif k == 'ctoaster_version': ctoaster_version = v + v = ":".join(fs[1:]).strip() + if k == "ctoaster_root": + ctoaster_root = v + elif k == "ctoaster_data": + ctoaster_data = v + elif k == "ctoaster_test": + ctoaster_test = v + elif k == "ctoaster_jobs": + ctoaster_jobs = v + elif k == "ctoaster_version": + ctoaster_version = v return True except IOError as e: - if e.errno == errno.ENOENT: return False + if e.errno == errno.ENOENT: + return False raise - else: return False + else: + return False -# +# # could be: subprocess.Popen(cmd) instead + def fixe(file): - cmd = [os.path.join(ctoaster_root, 'tools', 'fix-exceptions.py'), file] - #os.system(cmd) + cmd = [os.path.join(ctoaster_root, "tools", "fix-exceptions.py"), file] + # os.system(cmd) sp.Popen(cmd) # Discover build platform. 
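In outline, the platform lookup reformatted below first honours an explicit config/platform-name file and otherwise falls back to the host name, then the OS name, then the generic platform string, each checked against ctoaster_root/platforms/. A small sketch of that fallback order, not part of the patch; the install root is a hypothetical value that would normally come from ~/.ctoasterrc:

    import os
    import platform

    ctoaster_root = os.path.expanduser("~/ctoaster.carrotcake")   # hypothetical install root

    def _has_platform_file(p):
        return os.path.exists(os.path.join(ctoaster_root, "platforms", p))

    # Fallback order used when config/platform-name is absent:
    for candidate in (platform.node(),                             # host name
                      platform.system().upper(),                   # e.g. "LINUX"
                      platform.platform().split("-")[0].upper()):  # generic platform string
        if _has_platform_file(candidate):
            print("build platform:", candidate)
            break
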
+ def discover_platform(): def exists(p): - return os.path.exists(os.path.join(ctoaster_root, 'platforms', p)) + return os.path.exists(os.path.join(ctoaster_root, "platforms", p)) def discover(): - host = platform.node() # platform.node() returns the computer's network name. - os = platform.system().upper() # Returns the system/OS name. The upper() method returns a string where all characters are in upper case. - plat = platform.platform().split('-')[0].upper() - if exists(host): return host - if exists(os): return os - if exists(plat): return plat - sys.exit('Cannot find suitable build platform!') - - pfile = os.path.join('config', 'platform-name') + host = platform.node() # platform.node() returns the computer's network name. + os = ( + platform.system().upper() + ) # Returns the system/OS name. The upper() method returns a string where all characters are in upper case. + plat = platform.platform().split("-")[0].upper() + if exists(host): + return host + if exists(os): + return os + if exists(plat): + return plat + sys.exit("Cannot find suitable build platform!") + + pfile = os.path.join("config", "platform-name") if os.path.exists(pfile): with open(pfile) as fp: p = fp.readline().strip() else: p = discover() - if exists(p): return p - else: sys.exit('Build platform "' + p + '" not known!') + if exists(p): + return p + else: + sys.exit('Build platform "' + p + '" not known!') # Recognised build types. -build_types = ['normal', 'debug', 'ship', 'profile', 'bounds', 'coverage'] +build_types = ["normal", "debug", "ship", "profile", "bounds", "coverage"] # Model configuration information: model configuration is based on @@ -75,26 +103,32 @@ def discover(): # that model builds for different coordinate definitions, tracer # counts, etc. are correctly segregated). + class ModelConfig: # Assumes current working directory is the job directory. def __init__(self, build_type, dir=None): - if not build_type: build_type = 'normal' - vfile = os.path.join('config', 'model-version') # ('model-version' is a file in the config subdirectory of the job directory) - if dir: vfile = os.path.join(dir, vfile) + if not build_type: + build_type = "normal" + vfile = os.path.join( + "config", "model-version" + ) # ('model-version' is a file in the config subdirectory of the job directory) + if dir: + vfile = os.path.join(dir, vfile) with open(vfile) as fp: self.model_version = fp.readline().strip() self.display_model_version = self.model_version - if self.model_version.startswith('DEVELOPMENT:'): - self.display_model_version = self.model_version.split(':')[1] - self.model_version = 'DEVELOPMENT' + if self.model_version.startswith("DEVELOPMENT:"): + self.display_model_version = self.model_version.split(":")[1] + self.model_version = "DEVELOPMENT" self.platform = discover_platform() self.build_type = build_type # Determine the model directory for this configuration: these all # live under ctoaster_jobs/MODELS. def directory(self): - return os.path.join(ctoaster_jobs, 'MODELS', - self.model_version, self.platform, self.build_type) + return os.path.join( + ctoaster_jobs, "MODELS", self.model_version, self.platform, self.build_type + ) # Clean out model builds for a given model configuration -- this # removes all build types for this model configuration, just to @@ -102,41 +136,44 @@ def directory(self): def clean(self): # Go up one level to catch all build type directories. 
platd = os.path.abspath(os.path.join(self.directory(), os.pardir)) - for d in glob.iglob(os.path.join(platd, '*')): + for d in glob.iglob(os.path.join(platd, "*")): shutil.rmtree(d) - if os.path.exists(platd): os.removedirs(platd) + if os.path.exists(platd): + os.removedirs(platd) # Set up model build directory. def setup(self): d = self.directory() - if not os.path.exists(d): os.makedirs(d) - vfile = os.path.join(d, 'version.py') + if not os.path.exists(d): + os.makedirs(d) + vfile = os.path.join(d, "version.py") if not os.path.exists(vfile): - if self.model_version == 'DEVELOPMENT': + if self.model_version == "DEVELOPMENT": scons_dir = ctoaster_root else: - scons_dir = os.path.join(ctoaster_jobs, 'MODELS', 'REPOS', - self.model_version) - scons_srcdir = os.path.join(scons_dir, 'src') - scons_srcdir = scons_srcdir.replace('\\', '\\\\') - scriptdir = os.path.join(ctoaster_root, 'tools') - scriptdir = scriptdir.replace('\\', '\\\\') - with open(vfile, 'w') as fp: - print('# Model source directory', file=fp) + scons_dir = os.path.join( + ctoaster_jobs, "MODELS", "REPOS", self.model_version + ) + scons_srcdir = os.path.join(scons_dir, "src") + scons_srcdir = scons_srcdir.replace("\\", "\\\\") + scriptdir = os.path.join(ctoaster_root, "tools") + scriptdir = scriptdir.replace("\\", "\\\\") + with open(vfile, "w") as fp: + print("# Model source directory", file=fp) print("srcdir = '" + scons_srcdir + "'\n", file=fp) - print('# Model script directory', file=fp) + print("# Model script directory", file=fp) print("scriptdir = '" + scriptdir + "'\n", file=fp) - print('# Build type', file=fp) + print("# Build type", file=fp) print("build_type = '" + self.build_type + "'\n", file=fp) # copy SConstruct file from installation root - sfile = os.path.join(d, 'SConstruct') + sfile = os.path.join(d, "SConstruct") if not os.path.exists(sfile): - shutil.copy(os.path.join(scons_dir, 'SConstruct'), sfile) + shutil.copy(os.path.join(scons_dir, "SConstruct"), sfile) # copy utils.py file from tools sub-directory # (becasue SConstruct cannot find utils.py [import utils as U] ...) - sfile = os.path.join(d, 'utils.py') + sfile = os.path.join(d, "utils.py") if not os.path.exists(sfile): - shutil.copy(os.path.join(scriptdir, 'utils.py'), sfile) + shutil.copy(os.path.join(scriptdir, "utils.py"), sfile) # Determine list of available model versions. @@ -145,29 +182,46 @@ def setup(self): def available_versions(): - git_versions = sp.check_output(['git', 'tag', '-l']).splitlines() + git_versions = sp.check_output(["git", "tag", "-l"]).splitlines() str_git_versions = [x.decode("utf-8") for x in git_versions] - return ['DEVELOPMENT'] + str_git_versions + return ["DEVELOPMENT"] + str_git_versions ###return ['DEVELOPMENT'] + sp.check_output(['git', 'tag', '-l']).splitlines() # Set up repository clone for building model at explicitly # selected version tags. 
+ def setup_version_repo(ver): - if ver == 'DEVELOPMENT': return + if ver == "DEVELOPMENT": + return if ver not in available_versions(): sys.exit('Invalid model version "' + ver + '"') - dst = os.path.join(ctoaster_jobs, 'MODELS', 'REPOS', ver) - if os.path.exists(dst): return dst - with open(os.devnull, 'w') as sink: + dst = os.path.join(ctoaster_jobs, "MODELS", "REPOS", ver) + if os.path.exists(dst): + return dst + with open(os.devnull, "w") as sink: # alternative hard-coded branch specification ###if sp.call(['git', 'clone', '-l', '--single-branch', '--branch', '_DEV_install', ### os.curdir, dst], stdout=sink, stderr=sink) != 0: - if sp.call(['git', 'clone', '-l', '--single-branch', '--branch', ver, - os.curdir, dst], stdout=sink, stderr=sink) != 0: - sys.exit('Failed to set up repository clone for version "' + - ver + '"') - with open(os.path.join(dst, 'repo-version'), 'w') as ofp: + if ( + sp.call( + [ + "git", + "clone", + "-l", + "--single-branch", + "--branch", + ver, + os.curdir, + dst, + ], + stdout=sink, + stderr=sink, + ) + != 0 + ): + sys.exit('Failed to set up repository clone for version "' + ver + '"') + with open(os.path.join(dst, "repo-version"), "w") as ofp: print(ver, file=ofp) return dst
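Finally, tying the renamed executable in go.py to ModelConfig.directory(): a small sketch of where a given build lands and which file the run scripts now look for. The jobs path, version, platform, and build type are hypothetical values; in practice they come from ~/.ctoasterrc, config/model-version, platform discovery, and the requested build type:

    import os

    ctoaster_jobs = os.path.expanduser("~/ctoaster-jobs")   # hypothetical; read from ~/.ctoasterrc
    model_version, plat, build_type = "DEVELOPMENT", "LINUX", "ship"

    model_dir = os.path.join(ctoaster_jobs, "MODELS", model_version, plat, build_type)
    exe_name = "carrotcake-" + build_type + ".exe" if build_type else "carrotcake.exe"
    print(model_dir)   # e.g. /home/user/ctoaster-jobs/MODELS/DEVELOPMENT/LINUX/ship
    print(exe_name)    # carrotcake-ship.exe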