diff --git a/src/CSET/cset_workflow/app/finish_website/bin/finish_website.py b/src/CSET/cset_workflow/app/finish_website/bin/finish_website.py index 2af3cde64..06a8f0a27 100755 --- a/src/CSET/cset_workflow/app/finish_website/bin/finish_website.py +++ b/src/CSET/cset_workflow/app/finish_website/bin/finish_website.py @@ -14,10 +14,11 @@ # limitations under the License. """ -Write finished status to website front page. +Create the CSET diagnostic viewing website. -Constructs the plot index, and does the final update to the workflow status on -the front page of the web interface. +Copies the static files that make up the web interface, constructs the plot +index, and updates the workflow status on the front page of the +web interface. """ import datetime @@ -28,71 +29,95 @@ from importlib.metadata import version from pathlib import Path -from CSET._common import combine_dicts, sort_dict +from CSET._common import sort_dict logging.basicConfig( level=os.getenv("LOGLEVEL", "INFO"), format="%(asctime)s %(levelname)s %(message)s" ) - - -def construct_index(): - """Construct the plot index. - - Index should has the form ``{"Category Name": {"recipe_id": "Plot Name"}}`` - where ``recipe_id`` is the name of the plot's directory. - """ - index = {} - plots_dir = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"]) / "web/plots" - # Loop over all diagnostics and append to index. - for metadata_file in plots_dir.glob("**/*/meta.json"): - try: - with open(metadata_file, "rt", encoding="UTF-8") as fp: - plot_metadata = json.load(fp) - - category = plot_metadata["category"] - case_date = plot_metadata.get("case_date", "") - relative_url = str(metadata_file.parent.relative_to(plots_dir)) - - record = { - category: { - case_date if case_date else "Aggregation": { - relative_url: plot_metadata["title"].strip() - } - } - } - except (json.JSONDecodeError, KeyError, TypeError) as err: - logging.error("%s is invalid, skipping.\n%s", metadata_file, err) - continue - index = combine_dicts(index, record) - - # Sort index of diagnostics. - index = sort_dict(index) - - # Write out website index. - with open(plots_dir / "index.json", "wt", encoding="UTF-8") as fp: - json.dump(index, fp, indent=2) - - -def update_workflow_status(): +logger = logging.getLogger(__name__) + + +def install_website_skeleton(www_root_link: Path, www_content: Path): + """Copy static website files and create symlink from web document root.""" + # Remove existing link to output ahead of creating new symlink. + logger.info("Removing any existing output link at %s.", www_root_link) + www_root_link.unlink(missing_ok=True) + + logger.info("Installing website files to %s.", www_content) + # Create directory for web content. + www_content.mkdir(parents=True, exist_ok=True) + # Copy static HTML/CSS/JS. + html_source = Path.cwd() / "html" + shutil.copytree(html_source, www_content, dirs_exist_ok=True) + # Create directory for plots. + plot_dir = www_content / "plots" + plot_dir.mkdir(exist_ok=True) + + logger.info("Linking %s to web content.", www_root_link) + # Ensure parent directories of WEB_DIR exist. + www_root_link.parent.mkdir(parents=True, exist_ok=True) + # Create symbolic link to web directory. + # NOTE: While good for space, it means `cylc clean` removes output. + www_root_link.symlink_to(www_content) + + +def construct_index(www_content: Path): + """Construct the plot index.""" + plots_dir = www_content / "plots" + with open(plots_dir / "index.jsonl", "wt", encoding="UTF-8") as index_fp: + # Loop over all diagnostics and append to index. 
The glob is sorted to + # ensure a consistent ordering. + for metadata_file in sorted(plots_dir.glob("**/*/meta.json")): + try: + with open(metadata_file, "rt", encoding="UTF-8") as plot_fp: + plot_metadata = json.load(plot_fp) + plot_metadata["path"] = str(metadata_file.parent.relative_to(plots_dir)) + # Remove keys that are not useful for the index. + plot_metadata.pop("description", None) + plot_metadata.pop("plots", None) + # Sort plot metadata. + plot_metadata = sort_dict(plot_metadata) + # Write metadata into website index. + json.dump(plot_metadata, index_fp, separators=(",", ":")) + index_fp.write("\n") + except (json.JSONDecodeError, KeyError, TypeError) as err: + logger.error("%s is invalid, skipping.\n%s", metadata_file, err) + continue + + +def update_workflow_status(www_content: Path): """Update the workflow status on the front page of the web interface.""" - web_dir = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"] + "/web") - with open(web_dir / "status.html", "wt") as fp: + with open(www_content / "placeholder.html", "r+t") as fp: + content = fp.read() finish_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") - fp.write(f"

Completed at {finish_time} using CSET v{version('CSET')}

\n") + status = f"Completed at {finish_time} using CSET v{version('CSET')}" + new_content = content.replace( + '

Unknown

', + f'

{status}

', + ) + fp.seek(0) + fp.truncate() + fp.write(new_content) -def copy_rose_config(): +def copy_rose_config(www_content: Path): """Copy the rose-suite.conf file to add to output web directory.""" rose_suite_conf = Path(os.environ["CYLC_WORKFLOW_RUN_DIR"]) / "rose-suite.conf" - web_conf_file = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"]) / "web/rose-suite.conf" - shutil.copy(rose_suite_conf, web_conf_file) + web_conf_file = www_content / "rose-suite.conf" + shutil.copyfile(rose_suite_conf, web_conf_file) def run(): """Do the final steps to finish the website.""" - construct_index() - update_workflow_status() - copy_rose_config() + # Strip trailing slashes in case they have been added in the config. + # Otherwise they break the symlinks. + www_root_link = Path(os.environ["WEB_DIR"].rstrip("/")) + www_content = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"] + "/web") + + install_website_skeleton(www_root_link, www_content) + construct_index(www_content) + update_workflow_status(www_content) + copy_rose_config(www_content) if __name__ == "__main__": # pragma: no cover diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/index.html b/src/CSET/cset_workflow/app/finish_website/file/html/index.html similarity index 70% rename from src/CSET/cset_workflow/app/install_website_skeleton/file/html/index.html rename to src/CSET/cset_workflow/app/finish_website/file/html/index.html index 66c16a0b6..dab72f7a5 100644 --- a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/index.html +++ b/src/CSET/cset_workflow/app/finish_website/file/html/index.html @@ -15,10 +15,20 @@

CSET

+ + + + +
+ Search facets +
+
-
- +
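The markup added above wires the new search box and "Search facets" fieldset into the front page; what that UI ultimately consumes is the plots/index.jsonl file written by finish_website.py. As a minimal sketch of the record format it expects — one compact JSON object per line, with a "path" facet added and the bulky "description"/"plots" keys dropped, mirroring construct_index — every field value below is invented for illustration:

import json

# Hypothetical meta.json contents as written by a diagnostic task.
plot_metadata = {
    "title": "Temperature histogram",
    "category": "Histograms",
    "case_date": "20250926T0000Z",
    "description": "Long-form text that is dropped from the index.",
    "plots": ["plot.html"],
}

# Mirror construct_index: record the plot directory relative to plots/, drop
# keys the sidebar does not need, and emit one compact, key-sorted JSON object
# per line of index.jsonl.
plot_metadata["path"] = "temperature_histogram_20250926T0000Z"
plot_metadata.pop("description", None)
plot_metadata.pop("plots", None)
print(json.dumps(plot_metadata, sort_keys=True, separators=(",", ":")))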
diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/placeholder.html b/src/CSET/cset_workflow/app/finish_website/file/html/placeholder.html similarity index 58% rename from src/CSET/cset_workflow/app/install_website_skeleton/file/html/placeholder.html rename to src/CSET/cset_workflow/app/finish_website/file/html/placeholder.html index 69f4cf6c1..ccd281175 100644 --- a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/placeholder.html +++ b/src/CSET/cset_workflow/app/finish_website/file/html/placeholder.html @@ -22,35 +22,16 @@

Select a plot from the sidebar

Your processed diagnostics can be accessed via the buttons on the sidebar.

Workflow status

-
-

Unknown

-
+

Unknown

CSET configuration file

rose-suite.conf

-

Send feedback

+

Send feedback

- CSET is a new system, and we would love to hear about your - experiences. Please tell us your highlights, issues, or - suggestions: + CSET is a new system, and we would love to hear about your experiences. + Please tell us your highlights, issues, or suggestions: Feedback via GitHub | Feedback via email

- diff --git a/src/CSET/cset_workflow/app/finish_website/file/html/plots/parser.py b/src/CSET/cset_workflow/app/finish_website/file/html/plots/parser.py new file mode 100755 index 000000000..b250addfe --- /dev/null +++ b/src/CSET/cset_workflow/app/finish_website/file/html/plots/parser.py @@ -0,0 +1,544 @@ +#!/usr/bin/env python3 + +"""Search query lexer and parser. + +EBNF to implement: + +query = expression ; + +expression = condition + | expression , combiner ? , expression + | "NOT" , expression + | "(" , expression , ")" ; + +combiner = "AND" + | "OR" ; + +condition = facet ? , operator ? , value ; + +facet = LITERAL , ":" ; + +value = LITERAL ; + +operator = NOT + | GREATER_THAN + | GREATER_THAN_OR_EQUALS + | LESS_THAN + | LESS_THAN_OR_EQUALS + | NOT_EQUALS + | EQUALS ; +""" + +import re +from collections.abc import Callable, Iterable +from enum import Enum, auto + + +class Combiner(Enum): + """Enum of combiners.""" + + NOT = auto() + AND = auto() + OR = auto() + + +class Operator(Enum): + """Enum of operators.""" + + IN = auto() + NOT_IN = auto() + EQUALS = auto() + NOT_EQUALS = auto() + GREATER_THAN = auto() + GREATER_THAN_OR_EQUALS = auto() + LESS_THAN = auto() + LESS_THAN_OR_EQUALS = auto() + + +class Parenthesis(Enum): + """Enum of parenthesis.""" + + BEGIN = auto() + END = auto() + + +class LexOnly(Enum): + """Enum of tokens converted to richer types during lexing.""" + + WHITESPACE = auto() + FACET = auto() + LITERAL = auto() + + +class LiteralToken: + """A literal value.""" + + value: str + + def __init__(self, value: str): + self.value = value + + def __str__(self) -> str: + """Return str(self).""" + return f"LiteralToken[{self.value}]" + + def __repr__(self) -> str: + """Return repr(self).""" + return f"LiteralToken({repr(self.value)})" + + +class Facet: + """A facet value.""" + + value: str + + def __init__(self, value: str): + self.value = value + + def __str__(self) -> str: + """Return str(self).""" + return f"Facet[{self.value}]" + + def __repr__(self) -> str: + """Return repr(self).""" + return f"Facet({repr(self.value)})" + + +Token = LiteralToken | Facet | Parenthesis | Combiner | Operator + +TOKEN_SPEC = { + Parenthesis.BEGIN: r"\(", + Parenthesis.END: r"\)", + Operator.GREATER_THAN_OR_EQUALS: r"<=", + Operator.GREATER_THAN: r"<", + Operator.LESS_THAN_OR_EQUALS: r">=", + Operator.LESS_THAN: r">", + Operator.NOT_EQUALS: r"!=", + Operator.EQUALS: r"=", + Operator.NOT_IN: r"!", + Combiner.NOT: r"\bnot\b", + Combiner.AND: r"\band\b", + Combiner.OR: r"\bor\b", + LexOnly.WHITESPACE: r"[ \t]+", + LexOnly.FACET: r"[a-z_\-]+[ \t]*:", + LexOnly.LITERAL: r"""'[^']*'|"[^"]*"|[^ \t\(\)]+""", +} +TOKEN_REGEX = re.compile( + "|".join( + f"(?P<{str(key).replace('.', '_')}>{val})" for key, val in TOKEN_SPEC.items() + ), + flags=re.IGNORECASE, +) +TOKEN_NAME_MAPPING = {str(key).replace(".", "_"): key for key in TOKEN_SPEC.keys()} + + +def lexer(s: str) -> Iterable[Token]: + """Lex input string into tokens.""" + for match in re.finditer(TOKEN_REGEX, s): + # Get the Enum object from token_spec matching the capture group name. 
+ if match.lastgroup is None: + raise ValueError("Query did not consist of valid tokens.") + kind = TOKEN_NAME_MAPPING[match.lastgroup] + value = match.group() + match kind: + case None: + raise ValueError("Oh no!") + case LexOnly.WHITESPACE: + continue + case LexOnly.FACET: + facet_name = value.rstrip(" \t:") + yield Facet(facet_name) + case LexOnly.LITERAL: + if (value.startswith("'") and value.endswith("'")) or ( + value.startswith('"') and value.endswith('"') + ): + value = value[1:-1] + yield LiteralToken(value) + case _: + yield kind + + +class Condition: + """A condition.""" + + func: Callable + + def __init__( + self, + value: LiteralToken | Callable, + facet: Facet = Facet("title"), # noqa: B008 + operator: Operator = Operator.IN, + ): + """Create a condition. + + Arguments + --------- + value: LiteralToken | Callable + The value to check for within the facet. May also be a callable to + determine this, in which case other arguments are ignored. + facet: Facet, optional + The facet to check. Defaults to title. + operator: Operator, optional + The operation to check with. One of the values of the Operator enum. + Defaults to IN. + + Returns + ------- + Condition + A function implementing the condition. It may raise a KeyError if + the facet is not present, so calling code should capture that. + """ + if callable(value): + self.func = value + return + + v = value.value + f = facet.value + + match operator: + case Operator.IN: + + def condition(d: dict[str, str]) -> bool: + return v in d[f] + case Operator.NOT_IN: + + def condition(d: dict[str, str]) -> bool: + return v not in d[f] + case Operator.EQUALS: + + def condition(d: dict[str, str]) -> bool: + return v == d[f] + case Operator.NOT_EQUALS: + + def condition(d: dict[str, str]) -> bool: + return v != d[f] + case Operator.GREATER_THAN: + + def condition(d: dict[str, str]) -> bool: + return v > d[f] + case Operator.GREATER_THAN_OR_EQUALS: + + def condition(d: dict[str, str]) -> bool: + return v >= d[f] + case Operator.LESS_THAN: + + def condition(d: dict[str, str]) -> bool: + return v < d[f] + case Operator.LESS_THAN_OR_EQUALS: + + def condition(d: dict[str, str]) -> bool: + return v <= d[f] + case _: + raise ValueError(f"Invalid operator: {operator}") + + self.func = condition + + def __repr__(self) -> str: + """Return repr(self).""" + return f"" + + def __call__(self, d: dict[str, str]) -> bool: + """Test whether a dictionary matches this condition.""" + return self.func(d) + + def __and__(self, other): + """Implement self & other.""" + if not isinstance(other, Condition): + return NotImplemented + + def combined(d: dict[str, str]) -> bool: + return self(d) and other(d) + + return Condition(combined) + + def __or__(self, other): + """Implement self | other.""" + if not isinstance(other, Condition): + return NotImplemented + + def combined(d: dict[str, str]) -> bool: + return self(d) or other(d) + + return Condition(combined) + + def __invert__(self): + """Implement ~self.""" + + def combined(d: dict[str, str]) -> bool: + return not self(d) + + return Condition(combined) + + +def parse_grouped_expression(tokens: list[Token]) -> tuple[int, Condition | None]: + """Parse a grouped expression from a stream of tokens. + + Arguments + --------- + tokens: list[Token] + List of tokens, starting from the potential grouped expression. + + Returns + ------- + offset: int + How many tokens were consumed by the grouped expression. A value of 0 + indicates it was not a grouped expression. 
+ Condition | None + The Condition function for this expression. None if there was not a + grouped expression. + + Raises + ------ + ValueError + If the parentheses are unmatched. + """ + if len(tokens) < 2 or tokens[0] != Parenthesis.BEGIN: + return 0, None + offset = 1 + depth = 1 + while depth > 0 and offset < len(tokens): + match tokens[offset]: + case Parenthesis.BEGIN: + depth += 1 + case Parenthesis.END: + depth -= 1 + offset += 1 + if depth != 0: + raise ValueError("Unmatched parenthesis.") + # Recursively parse the grouped expression. + inner_expression = parse_expression(tokens[1 : offset - 1]) + return offset, inner_expression + + +def parse_condition(tokens: list[Token]) -> tuple[int, Condition | None]: + """Parse a condition from a stream of tokens. + + Arguments + --------- + tokens: list[Token] + List of tokens, starting from the potential condition. + + Returns + ------- + offset: int + How many tokens were consumed by the condition. A value of 0 indicates + it was not a condition. + Condition | None + The Condition function for this condition. None if there was not a + condition. + """ + match tokens[:3]: + case [lt, *_] if isinstance(lt, LiteralToken): + # Just a value to search for. + return 1, Condition(lt) + case [op, lt, *_] if isinstance(op, Operator) and isinstance(lt, LiteralToken): + # Value to search for with operator. + return 2, Condition(lt, operator=op) + case [fc, lt, *_] if isinstance(fc, Facet) and isinstance(lt, LiteralToken): + # Value to search for in facet. + return 2, Condition(lt, facet=fc) + case [fc, op, lt] if ( + isinstance(fc, Facet) + and isinstance(op, Operator) + and isinstance(lt, LiteralToken) + ): + # Value to search for in facet with operator. + return 3, Condition(lt, facet=fc, operator=op) + case _: + # Not matched as a condition. + return 0, None + + +def evaluate_not(conditions): + """Collapse all NOTs in a list of conditions.""" + negated_conditions = [] + index = 0 + while index < len(conditions): + match conditions[index : index + 2]: + case [Combiner.NOT, Combiner.NOT]: + # Skip double NOTs, as they negate each other. 
+ index += 2 + case [Combiner.NOT, right] if isinstance(right, Condition): + negated_conditions.append(~right) + index += 2 + case [left, *_] if left != Combiner.NOT: + negated_conditions.append(left) + index += 1 + case _: + raise ValueError("Unprocessable NOT.") + return negated_conditions + + +def evaluate_and(conditions): + """Collapse all explicit and implicit ANDs in a list of conditions.""" + anded_conditions = [] + index = 0 + while index < len(conditions): + left = anded_conditions.pop() if anded_conditions else None + match conditions[index : index + 2]: + case [Combiner.AND, right] if isinstance(left, Condition) and isinstance( + right, Condition + ): + anded_conditions.append(left & right) + index += 2 + case [right, *_] if isinstance(left, Condition) and isinstance( + right, Condition + ): + anded_conditions.append(left & right) + index += 1 + case [right, *_] if right != Combiner.AND: + if left is not None: + anded_conditions.append(left) + anded_conditions.append(right) + index += 1 + case _: + raise ValueError("Unprocessable AND.") + return anded_conditions + + +def evaluate_or(conditions): + """Collapse all ORs in a list of conditions.""" + ored_conditions = [] + index = 0 + while index < len(conditions): + match conditions[index : index + 3]: + case [left, Combiner.OR, right] if isinstance( + left, Condition + ) and isinstance(right, Condition): + ored_conditions.append(left | right) + index += 3 + case [left, *_] if left != Combiner.OR: + ored_conditions.append(left) + index += 1 + case _: + raise ValueError("Unprocessable OR.") + return ored_conditions + + +def parse_expression(tokens: list[Token]) -> Condition: + """Parse an expression into a single Condition function. + + Pairs of conditions without an explicit combiner are treated as AND. + + The order of operations is: + 1. Evaluate NOTs first, left to right. + 2. Evaluate ANDs (explicit and implicit) second, left to right. + 3. Evaluate ORs third, left to right. + + Arguments + --------- + tokens: list[Token] + List of tokens to parse. + + Returns + ------- + Condition + The condition represented by the tokens. + + Raises + ------ + ValueError + If the tokens do not form a valid expression. + """ + conditions: list[Condition | Combiner] = [] + index = 0 + while index < len(tokens): + # Accounts for AND/OR/NOT. + if isinstance(combiner := tokens[index], Combiner): + conditions.append(combiner) + index += 1 + continue + + # Accounts for parentheses. + offset, condition = parse_grouped_expression(tokens[index:]) + if offset > 0: + assert condition is not None, "Only an offset of 0 returns None." + conditions.append(condition) + index += offset + continue + + # Accounts for Facets, Operators, and Literals. + offset, condition = parse_condition(tokens[index:]) + if offset > 0: + assert condition is not None, "Only an offset of 0 returns None." + conditions.append(condition) + index += offset + continue + + raise ValueError(f"Unexpected token in expression: {tokens[index]}") + + # TODO: Investigate Pratt parsing for handling combiner precedence in a + # single pass. It should allow parsing them in the while loop above. + + # Evaluate NOTs first, left to right. + conditions = evaluate_not(conditions) + + # Evaluate ANDs second, left to right. + conditions = evaluate_and(conditions) + + # Evaluate ORs third, left to right. + conditions = evaluate_or(conditions) + + # Verify we have collapsed down to a single condition at this point. 
+ if len(conditions) != 1 or not isinstance(conditions[0], Condition): + raise ValueError("Collapse should produce a single condition.") + + return conditions[0] + + +def query2condition(query: str) -> Condition: + """Convert a query string into a query function, or error.""" + try: + tokens = list(lexer(query)) + return parse_expression(tokens) + except ValueError as err: + raise ValueError("Query failed to parse.") from err + + +if __name__ == "__main__": + query = "((histogram AND field : temperature) OR (time_series AND field:humidity)) date:>= 2025-09-25T15:22Z ((!foo))" + # query = "NOT NOT NOT NOT NOT foo" + # query = "temperature NOT(!foo)" + tokens = list(lexer(query)) + for token in tokens: + print(token) + + print("-" * 50) + + query_func = parse_expression(tokens) + + print("-" * 50) + + for diagnostic in [ + { + "title": "temperature_histogram_foo", + "field": "temperature", + "date": "2025-10-02T23:09Z", + "show": "NO", + }, + { + "title": "temperature_histogram", + "field": "temperature", + "date": "2025-10-02T23:09Z", + "show": "YES", + }, + { + "title": "old_temperature_histogram", + "field": "temperature", + "date": "2021-10-02T23:09Z", + "show": "NO", + }, + { + "title": "humidity_histogram", + "field": "humidity", + "date": "2025-10-02T23:09Z", + "show": "NO", + }, + { + "title": "humidity_time_series", + "field": "humidity", + "date": "2025-10-02T23:09Z", + "show": "YES", + }, + ]: + print(f"{query_func(diagnostic)}\t{diagnostic}") diff --git a/src/CSET/cset_workflow/app/finish_website/file/html/plots/querys b/src/CSET/cset_workflow/app/finish_website/file/html/plots/querys new file mode 100644 index 000000000..79e62f7cc --- /dev/null +++ b/src/CSET/cset_workflow/app/finish_website/file/html/plots/querys @@ -0,0 +1,37 @@ +# Example queries and their meaning. + +histogram + "histogram" in (is a substring of) title. + +field:temperature_at_screen_level + "temperature_at_screen_level" in field. + +field: temperature + "temperature" in field. + +field:=temperature + "temperature" exactly matches field. + +field : = temperature + "temperature" exactly matches field. Whitespace is optional around symbol tokens. + +field:x_wind OR field:y_wind + "x_wind" or "y_wind" in field. Whitespace is required around OR token. + +(histogram AND field:temperature) OR (time_series AND field:humidity) + Histograms of temperature and Time series of humidity. Parenthesis indicate precedence. + +histogram AND field:temperature OR time_series field:humidity + Histograms of temperature and Time series of humidity. + +NOT temperature + Not "temperature" in title. + +!temperature + "temperature" not in title. + +field:!temperature + "temperature" not in field. + +NOT field:temperature + "temperature" not in field. 
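Putting the grammar and the example queries above together, here is a minimal sketch of how one of these queries evaluates against index records via parser.query2condition. The two records are invented for illustration; real ones come from plots/index.jsonl:

from parser import query2condition

# Hypothetical index records; the facet values are made up for this example.
records = [
    {"title": "temperature_histogram", "field": "temperature", "date": "2025-09-26T00:00Z"},
    {"title": "humidity_time_series", "field": "humidity", "date": "2025-09-24T00:00Z"},
]

# NOT binds tightest, then AND (explicit or implicit), then OR; parentheses group.
condition = query2condition(
    "(histogram AND field:temperature) OR (time_series AND field:humidity)"
)

# Both records match: the first via the histogram branch, the second via the
# time_series branch. A record missing a queried facet raises KeyError.
print([record["title"] for record in records if condition(record)])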
diff --git a/src/CSET/cset_workflow/app/finish_website/file/html/plots/search.py b/src/CSET/cset_workflow/app/finish_website/file/html/plots/search.py new file mode 100755 index 000000000..4609f1b82 --- /dev/null +++ b/src/CSET/cset_workflow/app/finish_website/file/html/plots/search.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 + +"""Interactive search utility to test the parser.""" + +import json +import sys + +from parser import query2condition + +if len(sys.argv) < 2: + print("Usage: search.py FILE [QUERY]") + sys.exit(1) + +if len(sys.argv) == 2: + query = input("Query: ") +else: + query = " ".join(sys.argv[2:]) + +condition = query2condition(query) + +with open(sys.argv[1], "rt") as file: + for line in file: + d = json.loads(line) + if condition(d): + print(d["title"]) diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/favicon.ico b/src/CSET/cset_workflow/app/finish_website/file/html/static/favicon.ico similarity index 100% rename from src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/favicon.ico rename to src/CSET/cset_workflow/app/finish_website/file/html/static/favicon.ico diff --git a/src/CSET/cset_workflow/app/finish_website/file/html/static/script.js b/src/CSET/cset_workflow/app/finish_website/file/html/static/script.js new file mode 100644 index 000000000..0b52a5030 --- /dev/null +++ b/src/CSET/cset_workflow/app/finish_website/file/html/static/script.js @@ -0,0 +1,717 @@ +// JavaScript code that is used by the pages. Plots should not rely on this +// file, as it will not be stable. + +/** Search query lexer and parser. + * + * EBNF to implement: + * + * query = expression ; + * + * expression = condition + * | expression , combiner ? , expression + * | "NOT" , expression + * | "(" , expression , ")" ; + * + * combiner = "AND" + * | "OR" ; + * + * condition = facet ? , operator ? , value ; + * + * facet = LITERAL , ":" ; + * + * value = LITERAL ; + * + * operator = NOT + * | GREATER_THAN + * | GREATER_THAN_OR_EQUALS + * | LESS_THAN + * | LESS_THAN_OR_EQUALS + * | NOT_EQUALS + * | EQUALS ; + */ + +class Literal { + constructor(value) { + this.value = value; + } +} + +class Facet { + constructor(value) { + this.value = value; + } +} + +const TOKEN_SPEC = new Map([ + ["Parenthesis_BEGIN", "\\("], + ["Parenthesis_END", "\\)"], + ["Operator_GREATER_THAN_OR_EQUALS", "<="], + ["Operator_GREATER_THAN", "<"], + ["Operator_LESS_THAN_OR_EQUALS", ">="], + ["Operator_LESS_THAN", ">"], + ["Operator_NOT_EQUALS", "!="], + ["Operator_EQUALS", "="], + ["Operator_NOT_IN", "!"], + ["Combiner_NOT", "\\bnot\\b"], + ["Combiner_AND", "\\band\\b"], + ["Combiner_OR", "\\bor\\b"], + ["LexOnly_WHITESPACE", "[ \\t]+"], + ["LexOnly_FACET", "[a-z_\\-]+[ \\t]*:"], + ["LexOnly_LITERAL", `'[^']*'|"[^"]*"|[^ \\t\\(\\)]+`], +]); + +const TOKEN_REGEX = RegExp( + Array.from( + TOKEN_SPEC.entries().map((pair) => { + return `(?<${pair[0]}>${pair[1]})`; + }) + ).join("|"), + "ig" +); + +// Lex input string into tokens. +function lexer(query) { + const tokens = []; + for (const match of query.matchAll(TOKEN_REGEX)) { + // Get the Enum object from TOKEN_SPEC matching the capture group name. 
+ if (!match.groups) { + throw new SyntaxError("Query did not consist of valid tokens."); + } + let [kind, value] = Object.entries(match.groups).filter( + (pair) => pair[1] !== undefined + )[0]; + + switch (kind) { + case "LexOnly_WHITESPACE": + continue; + case "LexOnly_FACET": + const facet_name = value.replace(/[ \t]*:$/, ""); + tokens.push(new Facet(facet_name)); + break; + case "LexOnly_LITERAL": + if (/^".*"$|^'.+'$/.test(value)) { + value = value.slice(1, -1); + } + tokens.push(new Literal(value)); + break; + default: + tokens.push(kind); + break; + } + } + return tokens; +} + +class Condition { + constructor(value, facet = new Facet("title"), operator = "Operator_IN") { + if (typeof value == "function") { + this.func = value; + return; + } + + const v = value.value; + const f = facet.value; + + let condition_function = null; + + if (operator === "Operator_IN") { + function cond(d) { + return d[f].includes(v); + } + condition_function = cond; + } else if (operator === "Operator_NOT_IN") { + function cond(d) { + return !d[f].includes(v); + } + condition_function = cond; + } else if (operator === "Operator_EQUALS") { + function cond(d) { + return v == d[f]; + } + condition_function = cond; + } else if (operator === "Operator_NOT_EQUALS") { + function cond(d) { + return v != d[f]; + } + condition_function = cond; + } else if (operator === "Operator_GREATER_THAN") { + function cond(d) { + return v > d[f]; + } + condition_function = cond; + } else if (operator === "Operator_GREATER_THAN_OR_EQUALS") { + function cond(d) { + return v >= d[f]; + } + condition_function = cond; + } else if (operator === "Operator_LESS_THAN") { + function cond(d) { + return v < d[f]; + } + condition_function = cond; + } else if (operator === "Operator_LESS_THAN_OR_EQUALS") { + function cond(d) { + return v <= d[f]; + } + condition_function = cond; + } else { + throw new Error(`Invalid operator: ${operator}`); + } + this.func = condition_function; + } + + test(d) { + return this.func(d); + } + + // Implement self & other. + and(other) { + return new Condition((d) => this.test(d) && other.test(d)); + } + + // Implement self | other. + or(other) { + return new Condition((d) => this.test(d) || other.test(d)); + } + + // Implement ~self. + invert() { + return new Condition((d) => !this.test(d)); + } +} + +// Parse a grouped expression from a stream of tokens. +function parse_grouped_expression(tokens) { + if (tokens.length < 2 || tokens[0] !== "Parenthesis_BEGIN") { + return [0, null]; + } + let offset = 1; + let depth = 1; + while (depth > 0 && offset < tokens.length) { + switch (tokens[offset]) { + case "Parenthesis_BEGIN": + depth += 1; + break; + case "Parenthesis_END": + depth -= 1; + break; + } + offset += 1; + } + if (depth != 0) { + throw new Error("Unmatched parenthesis."); + } + // Recursively parse the grouped expression. + inner_expression = parse_expression(tokens.slice(1, offset - 1)); + return [offset, inner_expression]; +} + +// Parse a condition from a stream of tokens. +function parse_condition(tokens) { + if (tokens[0] instanceof Literal) { + // Just a value to search for. + const lt = tokens[0]; + return [1, new Condition(lt)]; + } else if ( + typeof tokens[0] === "string" && + tokens[0].startsWith("Operator.") && + tokens[1] instanceof Literal + ) { + // Value to search for with operator. + const op = tokens[0]; + const lt = tokens[1]; + return [2, new Condition(lt, op)]; + } else if (tokens[0] instanceof Facet && tokens[1] instanceof Literal) { + // Value to search for in facet. 
+ const fc = tokens[0]; + const lt = tokens[1]; + return [2, new Condition(lt, (facet = fc))]; + } else if ( + tokens[0] instanceof Facet && + tokens[1].startsWith("Operator.") && + tokens[1] instanceof Literal + ) { + // Value to search for in facet with operator. + return [3, new Condition(lt, (facet = fc), (operator = op))]; + } else { + // Not matched as a condition. + return [0, null]; + } +} + +// Collapse all NOTs in a list of conditions. +function evaluate_not(conditions) { + const negated_conditions = []; + let index = 0; + while (index < conditions.length) { + if ( + conditions[index] == "Combiner_NOT" && + conditions[index + 1] == "Combiner_NOT" + ) { + // Skip double NOTs, as they negate each other. + index += 2; + } else if ( + conditions[index] == "Combiner_NOT" && + conditions[index + 1] instanceof Condition + ) { + const right = conditions[index + 1]; + negated_conditions.push(right.invert()); + index += 2; + } else if (conditions[index] != "Combiner_NOT") { + negated_conditions.push(conditions[index]); + index += 1; + } else { + throw new Error("Unprocessable NOT."); + } + } + return negated_conditions; +} + +// Collapse all explicit and implicit ANDs in a list of conditions. +function evaluate_and(conditions) { + const anded_conditions = []; + let index = 0; + while (index < conditions.length) { + let left; + if (anded_conditions.length) { + left = anded_conditions.pop(); + } else { + left = null; + } + + if ( + left instanceof Condition && + conditions[index] == "Combiner_AND" && + conditions[index + 1] instanceof Condition + ) { + const right = conditions[index + 1]; + anded_conditions.push(left.and(right)); + index += 2; + } else if (left instanceof Condition && conditions[index] instanceof Condition) { + const right = conditions[index]; + anded_conditions.push(left.and(right)); + index += 2; + } else if (conditions[index] != "Combiner_AND") { + if (left !== null) { + anded_conditions.push(left); + } + const right = conditions[index]; + anded_conditions.push(right); + index += 1; + } else { + throw new Error("Unprocessable AND."); + } + } + return anded_conditions; +} + +// Collapse all ORs in a list of conditions. +function evaluate_or(conditions) { + const ored_conditions = []; + let index = 0; + while (index < conditions.length) { + if ( + conditions[index] instanceof Condition && + conditions[index + 1] === "Combiner_OR" && + conditions[index + 2] + ) { + const left = conditions[index]; + const right = conditions[index + 2]; + ored_conditions.push(left.or(right)); + index += 3; + } else if (conditions[index] !== "Combiner_OR") { + ored_conditions.push(conditions[index]); + index += 1; + } else { + throw new Error("Unprocessable OR."); + } + } + return ored_conditions; +} + +// Parse an expression into a single Condition function. +function parse_expression(tokens) { + console.log(tokens); + let conditions = []; + let index = 0; + while (index < tokens.length) { + console.log(conditions); + console.log(index); + // Accounts for AND/OR/NOT. + if (typeof tokens[index] === "string" && tokens[index].startsWith("Combiner_")) { + conditions.push(tokens[index]); + index += 1; + continue; + } + + // Accounts for parentheses. + let [offset, condition] = parse_grouped_expression(tokens.slice(index)); + if (offset > 0 && condition !== null) { + conditions.push(condition); + index += offset; + continue; + } + + // Accounts for Facets, Operators, and Literals. 
+ [offset, condition] = parse_condition(tokens.slice(index)); + if (offset > 0 && condition !== null) { + conditions.push(condition); + index += offset; + continue; + } + + console.error(tokens[index]); + throw new Error(`Unexpected token in expression: ${tokens[index]}`); + } + + // TODO: Investigate Pratt parsing for handling combiner precedence in a + // single pass. It should allow parsing them in the while loop above. + + // Evaluate NOTs first, left to right. + conditions = evaluate_not(conditions); + + // Evaluate ANDs second, left to right. + conditions = evaluate_and(conditions); + + // Evaluate ORs third, left to right. + conditions = evaluate_or(conditions); + + // Verify we have collapsed down to a single condition at this point. + if (conditions.length !== 1 || !conditions[0] instanceof Condition) { + throw new Error("Collapse should produce a single condition."); + } + + return conditions[0]; +} + +// Parse the query, returning a comparison function. +function query2condition(query) { + try { + const tokens = lexer(query); + if (tokens.length === 0) { + return new Condition((_) => true); + } + return parse_expression(tokens); + } catch (error) { + console.error("Query failed to parse."); + console.error(error); + // TODO: Add invalid class to input, so user gets feedback. + } +} + +/** + * End of query parser. + */ + +// Toggle display of the extended description for plots. Global variable so it +// can be referenced at plot insertion time. +let description_shown = true; + +function enforce_description_toggle() { + const description_toggle_button = document.getElementById("description-toggle"); + if (description_shown) { + description_toggle_button.textContent = "⇲ Hide description"; + } else { + description_toggle_button.textContent = "⇱ Show description"; + } + label: for (plot_frame of document.querySelectorAll("iframe")) { + const description_container = plot_frame.contentDocument.getElementById( + "description-container" + ); + // Skip doing anything if plot not loaded. + if (!description_container) { + continue label; + } + // Hide the description if it is exists and is shown, and show if hidden. + // Explicitly add and remove rather than toggle class to prevent the plots + // getting out of sync. + if (description_shown) { + description_container.classList.remove("hidden"); + } else { + description_container.classList.add("hidden"); + } + } +} + +// Hook up button. +function setup_description_toggle_button() { + const description_toggle_button = document.getElementById("description-toggle"); + // Skip if there is no description toggle on page. + if (!description_toggle_button) { + return; + } + description_toggle_button.addEventListener("click", () => { + description_shown = !description_shown; + enforce_description_toggle(); + }); + // Ensure the description toggle persists across changing the frame content. + for (const plot_frame of document.querySelectorAll("iframe")) { + plot_frame.addEventListener("load", () => { + enforce_description_toggle(); + }); + } +} + +// Display a single plot frame on the page. +function ensure_single_frame() { + const single_frame = document.getElementById("single-frame"); + const dual_frame = document.getElementById("dual-frame"); + dual_frame.classList.add("hidden"); + single_frame.classList.remove("hidden"); +} + +// Display two side-by-side plot frames on the page. 
+function ensure_dual_frame() { + const single_frame = document.getElementById("single-frame"); + const dual_frame = document.getElementById("dual-frame"); + single_frame.classList.add("hidden"); + dual_frame.classList.remove("hidden"); +} + +function add_to_sidebar(record, facet_values) { + const diagnostics_list = document.getElementById("diagnostics"); + + // Add entry's display name. + const entry_title = document.createElement("h2"); + entry_title.textContent = record["title"]; + + // Create card for diagnostic. + const facets = document.createElement("dl"); + for (const facet in record) { + if (facet != "title" && facet != "path") { + const facet_node = document.createElement("div"); + const facet_name = document.createElement("dt"); + const facet_value = document.createElement("dd"); + facet_name.textContent = facet; + facet_value.textContent = record[facet]; + facet_node.append(facet_name, facet_value); + facets.append(facet_node); + // Record facet values. + if (!(facet in facet_values)) { + facet_values[facet] = new Set(); + } + const values = facet_values[facet]; + values.add(record[facet]); + } + } + + // Container element for plot position chooser buttons. + const position_chooser = document.createElement("div"); + position_chooser.classList.add("plot-position-chooser"); + + // Bind path to name in this scope to ensure it sticks around for callbacks. + const path = record["path"]; + // Button icons. + const icons = { left: "◧", full: "▣", right: "◨", popup: "↗" }; + + // Add buttons for each position. + for (const position of ["left", "full", "right", "popup"]) { + // Create button. + const button = document.createElement("button"); + button.classList.add(position); + button.textContent = icons[position]; + + // Add a callback updating the iframe when the link is clicked. + button.addEventListener("click", (event) => { + event.preventDefault(); + // Open new window for popup. + if (position == "popup") { + window.open(`plots/${path}`, "_blank", "popup,width=800,height=600"); + return; + } + // Set the appropriate frame layout. + position == "full" ? ensure_single_frame() : ensure_dual_frame(); + document.getElementById(`plot-frame-${position}`).src = `plots/${path}`; + }); + + // Add button to chooser. + position_chooser.append(button); + } + + // Create entry. + const entry = document.createElement("li"); + + // Add name, facets, and position chooser to entry. + entry.append(entry_title, facets, position_chooser); + + // Join entry to the DOM. + diagnostics_list.append(entry); +} + +function add_facet_dropdowns(facet_values) { + const fieldset = document.getElementById("filter-facets"); + + for (const facet in facet_values) { + const label = document.createElement("label"); + label.setAttribute("for", `facet-${facet}`); + label.textContent = facet; + const select = document.createElement("select"); + select.id = `facet-${facet}`; + select.name = facet; + const null_option = document.createElement("option"); + null_option.value = ""; + null_option.defaultSelected = true; + null_option.textContent = "--- Any ---"; + select.append(null_option); + // Sort facet values. + const values = Array.from(facet_values[facet]); + values.sort(); + for (const value of values) { + const option = document.createElement("option"); + option.textContent = value; + select.append(option); + } + select.addEventListener("change", updateFacetQuery); + + // Add to DOM. + fieldset.append(label, select); + } +} + +// Update query based on facet dropdown value. 
+function updateFacetQuery(e) { + const facet = e.target.name; + const value = e.target.value; + const queryElem = document.getElementById("filter-query"); + const query = queryElem.value; + let new_query; + // Construct regular expression matching facet condition. + const pattern = RegExp(`${facet}:\\s*('[^']*'|"[^"]*"|[^ \\t\\(\\)]+)`, "i"); + if (value == "") { + // Facet unselected, remove from query. + new_query = query.replace(pattern, ""); + } else if (pattern.test(query)) { + // Facet value selected, update the query. + new_query = query.replace(pattern, `${facet}:"${value}"`); + } else { + // Facet value selected, add the query. + new_query = query + ` ${facet}:"${value}"`; + } + queryElem.value = new_query.trim(); + doSearch(); +} + +// Plot selection sidebar. +function setup_plots_sidebar() { + // Skip if there is no sidebar on page. + if (!document.getElementById("plot-selector")) { + return; + } + // Loading of plot index file, and adding them to the sidebar. + fetch("plots/index.jsonl") + .then((response) => { + // Display a message and stop if the fetch fails. + if (!response.ok) { + const message = `There was a problem fetching the index. Status Code: ${response.status}`; + console.warn(message); + window.alert(message); + return; + } + response.text().then((data) => { + const facet_values = {}; + // Remove throbber now download has finished. + document.querySelector("#diagnostics > loading-throbber").remove(); + for (let line of data.split("\n")) { + line = line.trim(); + // Skip blank lines. + if (line.length) { + add_to_sidebar(JSON.parse(line), facet_values); + } + } + add_facet_dropdowns(facet_values); + // Do search if we already have a query specified in the URL. + const search = document.getElementById("filter-query"); + const params = new URLSearchParams(document.location.search); + const initial_query = params.get("q"); + if (initial_query) { + search.value = initial_query; + doSearch(); + } + }); + }) + .catch((err) => { + // Catch non-HTTP fetch errors. + console.error("Plot index could not be retrieved: ", err); + }); +} + +function setup_clear_view_button() { + // Reset frames to placeholder view. + function clear_frames() { + for (plot_frame of document.querySelectorAll("iframe")) { + plot_frame.src = "placeholder.html"; + } + ensure_single_frame(); + } + + const clear_view_button = document.getElementById("clear-plots"); + clear_view_button.addEventListener("click", clear_frames); +} + +function setup_clear_search_button() { + const clear_search_button = document.getElementById("clear-query"); + clear_search_button.addEventListener("click", () => { + document.getElementById("filter-query").value = ""; + doSearch(); + }); +} + +// Filter the displayed diagnostics by the query. +function doSearch() { + const query = document.getElementById("filter-query").value; + // Update URL in address bar to match current query, deleting if blank. + const url = new URL(document.location.href); + query ? url.searchParams.set("q", query) : url.searchParams.delete("q"); + // Updates the URL without reloading the page. + history.pushState(history.state, "", url.href); + + console.log("Search query:", query); + const condition = query2condition(query); + + // Filter all entries. 
+ for (const entryElem of document.querySelectorAll("#diagnostics > li")) { + const entry = {}; + entry["title"] = entryElem.querySelector("h2").textContent; + for (const facet_node of entryElem.querySelector("dl").children) { + const facet = facet_node.firstChild.textContent; + const value = facet_node.lastChild.textContent; + entry[facet] = value; + } + + // Show entries matching filter and hide entries that don't. + if (condition.test(entry)) { + entryElem.classList.remove("hidden"); + } else { + entryElem.classList.add("hidden"); + } + } +} + +// For performance don't search on every keystroke immediately. Instead wait +// until quarter of a second of no typing has elapsed. To maximised perceived +// responsiveness immediately perform the search if a space is typed, as that +// indicates a completed search term. +let searchTimeoutID = undefined; +function debounce(e) { + clearTimeout(searchTimeoutID); + if (e.data == " ") { + doSearch(); + } else { + searchTimeoutID = setTimeout(doSearch, 250); + } +} + +// Diagnostic filtering searchbar. +function setup_search() { + const search = document.getElementById("filter-query"); + search.addEventListener("input", debounce); + // Immediately search if input is unfocused. + search.addEventListener("change", doSearch); +} + +// Run everything. +setup_description_toggle_button(); +setup_clear_view_button(); +setup_clear_search_button(); +setup_plots_sidebar(); +setup_search(); diff --git a/src/CSET/cset_workflow/app/finish_website/file/html/static/style.css b/src/CSET/cset_workflow/app/finish_website/file/html/static/style.css new file mode 100644 index 000000000..4dad69cf7 --- /dev/null +++ b/src/CSET/cset_workflow/app/finish_website/file/html/static/style.css @@ -0,0 +1,224 @@ +/* Inherit fonts for inputs and buttons */ +input, +button, +textarea, +address, +select { + font: inherit; +} + +body { + font-family: sans-serif; + line-height: 1.5; + height: 100vh; + margin: 0; +} + +a:focus, +a:hover { + text-decoration-skip-ink: none; + text-decoration-thickness: max(3px, 0.12em); +} + +nav { + display: inline; + float: left; + width: 35em; + height: 100%; + overflow-y: scroll; + background-color: whitesmoke; + border-right: 1px solid black; + + >header { + margin: 8px; + + >h1 { + font-size: xx-large; + margin: 0; + } + + >button { + margin: 4px 0; + } + + >search { + #filter-query { + width: 100%; + padding: 0.5em 1em; + margin: 8px 0; + border-radius: 3em; + } + + fieldset select { + display: block; + margin-bottom: 4px; + } + } + } + + >ul { + list-style: none; + padding: 0; + margin-top: 8px; + + >li { + background-color: lightgrey; + margin: 8px 0; + padding: 4px; + border-top: 1px solid black; + border-bottom: 1px solid black; + overflow-wrap: break-word; + + >h2 { + margin: 0; + font-size: medium; + } + + >dl { + font-size: small; + margin: 4px 0; + + dt { + display: inline; + } + + dt:after { + content: ": "; + } + + dd { + display: inline; + margin-left: 0; + } + } + + .plot-position-chooser { + display: flex; + margin: 4px auto; + font-size: medium; + + button { + margin: 0 4px; + cursor: pointer; + width: 33%; + + &.left { + background-color: #b8dcfd; + border-top-left-radius: 3em; + border-bottom-left-radius: 3em; + } + + &.full { + background-color: #cae387; + } + + &.right { + background-color: #fdcabb; + border-top-right-radius: 3em; + border-bottom-right-radius: 3em; + } + + &.popup { + background-color: #e2dd92; + border-radius: 3em; + } + + &:focus, + &:hover { + background-color: #1a1a1a; + color: white; + } + } + } + } + } +} + 
+.vsplit { + display: grid; + grid-template-columns: 1fr 1fr; + justify-content: stretch; + align-items: stretch; + height: 100%; +} + +.hsplit { + display: grid; + grid-template-rows: 1fr 1fr; + flex-direction: column; + justify-content: stretch; + align-items: stretch; +} + +.websplit-container { + display: flex; + flex-direction: column; + outline: solid 1px black; +} + +main { + height: 100%; +} + +main article { + display: grid; + height: 100%; +} + +main article>iframe { + border: none; + height: 100%; + width: 100%; +} + +.hidden { + display: none; +} + +/* Loading throbber from https://cssloaders.github.io/ + +MIT License + +Copyright (c) 2020 Vineeth.TR + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ +loading-throbber { + display: block; + margin: 48px auto; + width: 48px; + height: 48px; + border: 5px solid black; + border-bottom-color: transparent; + border-radius: 50%; + box-sizing: border-box; + animation: rotation 1s linear infinite; +} + +@keyframes rotation { + 0% { + transform: rotate(0deg); + } + + 100% { + transform: rotate(360deg); + } +} + +/* End loading throbber. */ diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/bin/install-website.sh b/src/CSET/cset_workflow/app/install_website_skeleton/bin/install-website.sh deleted file mode 100755 index bac5a3d13..000000000 --- a/src/CSET/cset_workflow/app/install_website_skeleton/bin/install-website.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash - -# Copies the static files for the web interface into the correct location, and -# creates a symbolic link under the web server's document root. - -set -euo pipefail -IFS="$(printf '\n\t')" - -# Strip trailing slashes in case they have been added in the config. Otherwise -# they break the symlinks. -WEB_DIR="${WEB_DIR%/}" - -# Remove existing output ahead of creating new symlink. -echo "Removing any existing output link at $WEB_DIR" -rm -vfr -- "$WEB_DIR" - -echo "Installing website files to $WEB_DIR" -# If we end up needing a build step for the website, here is where to run it. - -# Create directory for web content. -mkdir -v "${CYLC_WORKFLOW_SHARE_DIR}/web" -# Copy static HTML/CSS/JS. -cp -rv html/* "${CYLC_WORKFLOW_SHARE_DIR}/web" -# Create directory for plots. -mkdir -p "${CYLC_WORKFLOW_SHARE_DIR}/web/plots" - -# Ensure parent directories of WEB_DIR exist. -mkdir -p "$(dirname "$WEB_DIR")" - -# Create symbolic link to web directory. -# NOTE: While good for space, it means `cylc clean` removes output. 
-ln -s "${CYLC_WORKFLOW_SHARE_DIR}/web" "$WEB_DIR" diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/script.js b/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/script.js deleted file mode 100644 index 3301413ed..000000000 --- a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/script.js +++ /dev/null @@ -1,182 +0,0 @@ -// JavaScript code that is used by the pages. Plots should not rely on this -// file, as it will not be stable. - -// Toggle display of the extended description for plots. Global variable so it -// can be referenced at plot insertion time. -let description_shown = true; - -function enforce_description_toggle() { - const description_toggle_button = document.getElementById("description-toggle"); - if (description_shown) { - description_toggle_button.textContent = "⇲ Hide description"; - } else { - description_toggle_button.textContent = "⇱ Show description"; - } - label: for (plot_frame of document.querySelectorAll("iframe")) { - const description_container = plot_frame.contentDocument.getElementById( - "description-container" - ); - // Skip doing anything if plot not loaded. - if (!description_container) { - continue label; - } - // Hide the description if it is exists and is shown, and show if hidden. - // Explicitly add and remove rather than toggle class to prevent the plots - // getting out of sync. - if (description_shown) { - description_container.classList.remove("hidden"); - } else { - description_container.classList.add("hidden"); - } - } -} - -// Hook up button. -function setup_description_toggle_button() { - const description_toggle_button = document.getElementById("description-toggle"); - // Skip if there is no description toggle on page. - if (!description_toggle_button) { - return; - } - description_toggle_button.addEventListener("click", () => { - description_shown = !description_shown; - enforce_description_toggle(); - }); - // Ensure the description toggle persists across changing the frame content. - for (const plot_frame of document.querySelectorAll("iframe")) { - plot_frame.addEventListener("load", () => { - enforce_description_toggle(); - }); - } -} - -// Display a single plot frame on the page. -function ensure_single_frame() { - const single_frame = document.getElementById("single-frame"); - const dual_frame = document.getElementById("dual-frame"); - dual_frame.classList.add("hidden"); - single_frame.classList.remove("hidden"); -} - -// Display two side-by-side plot frames on the page. -function ensure_dual_frame() { - const single_frame = document.getElementById("single-frame"); - const dual_frame = document.getElementById("dual-frame"); - single_frame.classList.add("hidden"); - dual_frame.classList.remove("hidden"); -} - -function construct_sidebar_from_data(data) { - const sidebar = document.getElementById("plot-selector"); - // Button icons. - const icons = { left: "◧", full: "▣", right: "◨" }; - - for (const category in data) { - // Details element for category. - const category_details = document.createElement("details"); - - // Title for category (summary element). - const category_summary = document.createElement("summary"); - category_summary.textContent = category; - category_details.append(category_summary); - - // Add each case date into category. - for (const case_date in data[category]) { - // Details element for case_date. - const case_details = document.createElement("details"); - - // Title for case_date. 
- const case_summary = document.createElement("summary"); - case_summary.textContent = case_date; - case_details.append(case_summary); - - // Menu of plots for this category and case_date. - const case_menu = document.createElement("menu"); - - // Add each plot. - for (const plot in data[category][case_date]) { - // Menu entry for plot. - const list_item = document.createElement("li"); - list_item.textContent = data[category][case_date][plot]; - - // Container element for plot position chooser buttons. - const position_chooser = document.createElement("div"); - position_chooser.classList.add("plot-position-chooser"); - - // Add buttons for each position. - for (const position of ["left", "full", "right"]) { - // Create button. - const button = document.createElement("button"); - button.classList.add(position); - button.textContent = icons[position]; - - // Add a callback updating the iframe when the link is clicked. - button.addEventListener("click", (event) => { - event.preventDefault(); - // Set the appropriate frame layout. - position == "full" ? ensure_single_frame() : ensure_dual_frame(); - document.getElementById(`plot-frame-${position}`).src = `plots/${plot}`; - }); - - // Add button to chooser. - position_chooser.append(button); - } - - // Add position chooser to entry. - list_item.append(position_chooser); - - // Add entry to the menu. - case_menu.append(list_item); - } - - // Finish constructing this case and add to its category. - case_details.append(case_menu); - category_details.append(case_details); - } - - // Join category to the DOM. - sidebar.append(category_details); - } -} - -// Plot selection sidebar -function setup_plots_sidebar() { - // Skip if there is no sidebar on page. - if (!document.getElementById("plot-selector")) { - return; - } - // Loading of plot index file, and adding them to the sidebar. - fetch("plots/index.json") - .then((response) => { - // Display a message and stop if the fetch fails. - if (!response.ok) { - const message = `There was a problem fetching the index. Status Code: ${response.status}`; - console.warn(message); - window.alert(message); - return; - } - response.json().then(construct_sidebar_from_data); - }) - .catch((err) => { - // Catch non-HTTP fetch errors. - console.error("Plot index could not be retrieved: ", err); - }); -} - -function setup_clear_view_button() { - // Reset frames to placeholder view. - function clear_frames() { - for (plot_frame of document.querySelectorAll("iframe")) { - plot_frame.src = "placeholder.html"; - } - ensure_single_frame(); - } - - const clear_view_button = document.getElementById("clear-plots"); - clear_view_button.addEventListener("click", clear_frames); -} - -// Run everything. 
-setup_description_toggle_button(); -setup_clear_view_button(); -setup_plots_sidebar(); diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/style.css b/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/style.css deleted file mode 100644 index f8e929844..000000000 --- a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/static/style.css +++ /dev/null @@ -1,141 +0,0 @@ -/* Inherit fonts for inputs and buttons */ -input, -button, -textarea, -address, -select { - font: inherit; -} - -body { - font-family: sans-serif; - line-height: 1.5; - height: 100vh; - margin: 0; -} - -a:focus, -a:hover { - text-decoration-skip-ink: none; - text-decoration-thickness: max(3px, 0.12em); -} - -nav > header { - margin: 8px; -} - -nav > header > h1 { - font-size: xx-large; - margin: 0; -} - -nav > header > button { - margin: 4px 0; -} - -nav { - display: inline; - float: left; - width: 15em; - height: 100%; - overflow-x: hidden; - overflow-y: scroll; - background-color: whitesmoke; - border-right: 1px solid black; -} - -nav summary { - font-weight: bold; -} - -nav menu { - list-style: none; - padding: 0; - margin: 0; -} - -nav details { - margin: 8px; -} - -nav menu > li { - background-color: lightgrey; - margin: 8px 0; - padding: 0 4px; - border: 1px solid black; - font-size: small; - overflow-wrap: break-word; -} - -.plot-position-chooser { - display: flex; - margin-bottom: 0.25em; - font-size: medium; -} - -.plot-position-chooser button { - margin: 0 4px; - cursor: pointer; - width: 33%; -} - -.plot-position-chooser button.left { - background-color: #b8dcfd; - border-top-left-radius: 25% 50%; - border-bottom-left-radius: 25% 50%; -} -.plot-position-chooser button.full { - background-color: #c5e836; -} -.plot-position-chooser button.right { - background-color: #fdcabb; - border-top-right-radius: 25% 50%; - border-bottom-right-radius: 25% 50%; -} - -.plot-position-chooser button:focus, -.plot-position-chooser button:hover { - background-color: #1a1a1a; - color: white; -} - -.vsplit { - display: grid; - grid-template-columns: 1fr 1fr; - justify-content: stretch; - align-items: stretch; - height: 100%; -} - -.hsplit { - display: grid; - grid-template-rows: 1fr 1fr; - flex-direction: column; - justify-content: stretch; - align-items: stretch; -} - -.websplit-container { - display: flex; - flex-direction: column; - outline: solid 1px black; -} - -main { - height: 100%; -} - -main article { - display: grid; - height: 100%; -} - -main article > iframe { - border: none; - height: 100%; - width: 100%; -} - -.hidden { - display: none; -} diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/status.html b/src/CSET/cset_workflow/app/install_website_skeleton/file/html/status.html deleted file mode 100644 index 0a1f012f4..000000000 --- a/src/CSET/cset_workflow/app/install_website_skeleton/file/html/status.html +++ /dev/null @@ -1 +0,0 @@ -

<p>Running</p>
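The JavaScript removed above walked a nested plots/index.json of the form category → case date → plot to build the selector sidebar, while the tests later in this diff show the replacement index is a flat plots/index.jsonl with one record per plot carrying category, case_date, path and title keys. As an illustration only (not code from this change), those flat records can be regrouped into the old nesting with a few lines of Python; the group_index name and the "Aggregation" fallback label are taken from the removed index format and are assumptions here, not part of the new implementation:

import json
from collections import defaultdict
from pathlib import Path


def group_index(index_path: Path) -> dict:
    """Regroup flat index.jsonl records by category, then case date (illustrative sketch)."""
    grouped = defaultdict(lambda: defaultdict(dict))
    with open(index_path, "rt", encoding="UTF-8") as fp:
        for line in fp:
            if not line.strip():
                continue
            record = json.loads(line)
            # "Aggregation" mirrors the fallback label used by the removed nested index.
            case = record.get("case_date") or "Aggregation"
            grouped[record["category"]][case][record["path"]] = record["title"]
    return grouped

One record per line keeps the index streamable: entries can be read or appended individually without parsing the whole file.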
diff --git a/src/CSET/cset_workflow/app/install_website_skeleton/rose-app.conf b/src/CSET/cset_workflow/app/install_website_skeleton/rose-app.conf deleted file mode 100644 index 56303e0af..000000000 --- a/src/CSET/cset_workflow/app/install_website_skeleton/rose-app.conf +++ /dev/null @@ -1,2 +0,0 @@ -[command] -default=install-website.sh diff --git a/src/CSET/cset_workflow/flow.cylc b/src/CSET/cset_workflow/flow.cylc index 435d2b848..bc2febf58 100644 --- a/src/CSET/cset_workflow/flow.cylc +++ b/src/CSET/cset_workflow/flow.cylc @@ -30,7 +30,6 @@ final cycle point = {{CSET_TRIAL_END_DATE}} [[graph]] # Only runs on the first cycle. R1/^ = """ - validate_environment => install_website_skeleton => setup_complete validate_environment => assign_model_colours => setup_complete """ @@ -142,11 +141,6 @@ final cycle point = {{CSET_TRIAL_END_DATE}} [[validate_environment]] # Checks the environment works and the cset command is available. - [[install_website_skeleton]] - # Copies the static files that make up the web interface. - [[[environment]]] - WEB_DIR = {{WEB_DIR}} - [[assign_model_colours]] # Copies the style file, inserting per-model colour definitions to ensure # consistency between the many runs. @@ -213,7 +207,9 @@ final cycle point = {{CSET_TRIAL_END_DATE}} HOUSEKEEPING_MODE = {{HOUSEKEEPING_MODE}} [[finish_website]] - # Updates the workflow info in the web interface. + # Create the diagnostic viewing website. + [[[environment]]] + WEB_DIR = {{WEB_DIR}} [[send_email]] # Send email to notify that the workflow is complete. diff --git a/tests/workflow_utils/test_finish_website.py b/tests/workflow_utils/test_finish_website.py index b4148447e..071bf2cab 100644 --- a/tests/workflow_utils/test_finish_website.py +++ b/tests/workflow_utils/test_finish_website.py @@ -17,10 +17,25 @@ import json import logging import re +from pathlib import Path from CSET.cset_workflow.app.finish_website.bin import finish_website +def test_install_website_skeleton(monkeypatch, tmp_path): + """Check static files are copied correctly.""" + www_content = tmp_path / "web" + www_root_link = tmp_path / "www/CSET" + monkeypatch.chdir("src/CSET/cset_workflow/app/finish_website/file") + finish_website.install_website_skeleton(www_root_link, www_content) + assert www_content.is_dir() + assert (www_content / "index.html").is_file() + assert (www_content / "static/script.js").is_file() + assert (www_content / "plots").is_dir() + assert www_root_link.is_symlink() + assert www_root_link.resolve() == www_content.resolve() + + def test_copy_rose_config(monkeypatch, tmp_path): """Copy rose-suite.conf to web dir.""" rose_suite_conf = tmp_path / "rose-suite.conf" @@ -29,23 +44,23 @@ def test_copy_rose_config(monkeypatch, tmp_path): web_dir = tmp_path / "web" web_dir.mkdir() monkeypatch.setenv("CYLC_WORKFLOW_RUN_DIR", str(tmp_path)) - monkeypatch.setenv("CYLC_WORKFLOW_SHARE_DIR", str(tmp_path)) - finish_website.copy_rose_config() + finish_website.copy_rose_config(web_dir) with open(web_dir / "rose-suite.conf", "rt", encoding="UTF-8") as fp: assert fp.read() == "Test rose-suite.conf file\n" -def test_write_workflow_status(monkeypatch, tmp_path): - """Workflow finish status gets written to status file.""" +def test_write_workflow_status(tmp_path): + """Workflow finish status gets written to placeholder file.""" web_dir = tmp_path / "web" web_dir.mkdir() - monkeypatch.setenv("CYLC_WORKFLOW_SHARE_DIR", str(tmp_path)) - finish_website.update_workflow_status() - with open(web_dir / "status.html", "rt", encoding="UTF-8") as fp: - content = 
fp.read()
+    with open(web_dir / "placeholder.html", "wt") as fp:
+        fp.write('<h1>Workflow status</h1>\n<p>Unknown</p>\n')
+    finish_website.update_workflow_status(web_dir)
     # Check status is written correctly.
-    pattern = r"<p>Completed at \d{4}-\d\d-\d\d \d\d:\d\d using CSET v.+</p>
\n" - assert re.fullmatch(pattern, content) + pattern = r"Completed at \d{4}-\d\d-\d\d \d\d:\d\d using CSET v\d+" + with open(web_dir / "placeholder.html", "rt", encoding="UTF-8") as fp: + content = fp.read() + assert re.search(pattern, content) def test_construct_index(monkeypatch, tmp_path): @@ -68,20 +83,22 @@ def test_construct_index(monkeypatch, tmp_path): static_resource.touch() # Construct index. - finish_website.construct_index() + finish_website.construct_index(plots_dir.parent) # Check index. - index_file = plots_dir / "index.json" + index_file = plots_dir / "index.jsonl" assert index_file.is_file() with open(index_file, "rt", encoding="UTF-8") as fp: - index = json.load(fp) - expected = {"Category": {"20250101": {"p1": "P1", "p2": "P2"}}} + index = fp.read() + expected = ( + '{"case_date":"20250101","category":"Category","path":"p1","title":"P1"}\n' + '{"case_date":"20250101","category":"Category","path":"p2","title":"P2"}\n' + ) assert index == expected -def test_construct_index_aggregation_case(monkeypatch, tmp_path): +def test_construct_index_aggregation_case(tmp_path): """Construct the index from a diagnostics without a case date.""" - monkeypatch.setenv("CYLC_WORKFLOW_SHARE_DIR", str(tmp_path)) plots_dir = tmp_path / "web/plots" plots_dir.mkdir(parents=True) @@ -91,20 +108,43 @@ def test_construct_index_aggregation_case(monkeypatch, tmp_path): plot1.write_text('{"category": "Category", "title": "P1"}') # Construct index. - finish_website.construct_index() + finish_website.construct_index(plots_dir.parent) # Check index. - index_file = plots_dir / "index.json" + index_file = plots_dir / "index.jsonl" assert index_file.is_file() with open(index_file, "rt", encoding="UTF-8") as fp: index = json.load(fp) - expected = {"Category": {"Aggregation": {"p1": "P1"}}} + expected = {"category": "Category", "path": "p1", "title": "P1"} assert index == expected -def test_construct_index_invalid(monkeypatch, tmp_path, caplog): +def test_construct_index_remove_keys(tmp_path): + """Unneeded keys are removed from the index.""" + plots_dir = tmp_path / "web/plots" + plots_dir.mkdir(parents=True) + + # Plot directories. + plot1 = plots_dir / "p1/meta.json" + plot1.parent.mkdir() + plot1.write_text( + '{"category": "Category", "title": "P1", "case_date": "20250101", "plots": ["a.png"], "description": "Foo"}' + ) + + # Construct index. + finish_website.construct_index(plots_dir.parent) + + # Check index. + index_file = plots_dir / "index.jsonl" + assert index_file.is_file() + with open(index_file, "rt", encoding="UTF-8") as fp: + index = json.loads(fp.readline()) + assert "plots" not in index + assert "description" not in index + + +def test_construct_index_invalid(tmp_path, caplog): """Test constructing index when metadata is invalid.""" - monkeypatch.setenv("CYLC_WORKFLOW_SHARE_DIR", str(tmp_path)) plots_dir = tmp_path / "web/plots" plots_dir.mkdir(parents=True) @@ -114,35 +154,40 @@ def test_construct_index_invalid(monkeypatch, tmp_path, caplog): plot.write_text('"Not JSON!"') # Construct index. - finish_website.construct_index() + finish_website.construct_index(plots_dir.parent) # Check log message. _, level, message = caplog.record_tuples[0] assert level == logging.ERROR assert "p1/meta.json is invalid, skipping." 
in message - index_file = plots_dir / "index.json" + index_file = plots_dir / "index.jsonl" assert index_file.is_file() - with open(index_file, "rt", encoding="UTF-8") as fp: - index = json.load(fp) - expected = {} - assert index == expected + assert index_file.stat().st_size == 0 def test_entrypoint(monkeypatch): """Test running the finish_website module.""" # Count the number of times the other functions are run, to ensure they - # are both run. + # are all run. counter = 0 - def increment_counter(): + def check_args(www_root_link: Path, www_content: Path): + assert www_root_link == Path("/var/www/cset") + assert www_content == Path("/share/web") + increment_counter() + + def increment_counter(*args, **kwargs): nonlocal counter counter += 1 + monkeypatch.setattr(finish_website, "install_website_skeleton", check_args) monkeypatch.setattr(finish_website, "construct_index", increment_counter) monkeypatch.setattr(finish_website, "update_workflow_status", increment_counter) monkeypatch.setattr(finish_website, "copy_rose_config", increment_counter) + monkeypatch.setenv("WEB_DIR", "/var/www/cset") + monkeypatch.setenv("CYLC_WORKFLOW_SHARE_DIR", "/share") - # Just check that it runs all the needed subfunctions. + # Check that it runs all the needed subfunctions. finish_website.run() - assert counter == 3 + assert counter == 4
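The updated test_entrypoint sets WEB_DIR and CYLC_WORKFLOW_SHARE_DIR, patches the four sub-functions, and expects finish_website.run() to call install_website_skeleton with the WEB_DIR path and the share directory's web/ subdirectory, for four calls in total. run() itself is not shown in this diff; a minimal sketch consistent with that test, with the call order assumed, would be:

import os
from pathlib import Path


def run():
    # Sketch only: the real finish_website.run() body is not part of this diff.
    # install_website_skeleton, copy_rose_config, construct_index and
    # update_workflow_status are the functions defined in finish_website.py.
    www_root_link = Path(os.environ["WEB_DIR"])
    www_content = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"]) / "web"
    # Call order is assumed; the test only checks all four run with these paths.
    install_website_skeleton(www_root_link, www_content)
    copy_rose_config(www_content)
    construct_index(www_content)
    update_workflow_status(www_content)

The test only verifies the paths passed to install_website_skeleton and that every sub-function is invoked, so any ordering that builds the skeleton before populating it would satisfy it.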