Coverage for src/CSET/cset_workflow/app/finish_website/bin/finish_website.py: 100% (68 statements)

coverage.py v7.13.1, created at 2026-01-12 16:15 +0000

#!/usr/bin/env python3
# © Crown copyright, Met Office (2022-2025) and CSET contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Create the CSET diagnostic viewing website.

Copies the static files that make up the web interface, constructs the plot
index, and updates the workflow status on the front page of the web interface.
"""

import json
import logging
import os
import shutil
import time
from importlib.metadata import version
from pathlib import Path

from CSET._common import sort_dict

logging.basicConfig(
    level=os.getenv("LOGLEVEL", "INFO"), format="%(asctime)s %(levelname)s %(message)s"
)
logger = logging.getLogger(__name__)


def install_website_skeleton(www_root_link: Path, www_content: Path):
    """Copy static website files and create symlink from web document root."""
    # Remove existing link to output ahead of creating new symlink.
    logger.info("Removing any existing output link at %s.", www_root_link)
    www_root_link.unlink(missing_ok=True)

    logger.info("Installing website files to %s.", www_content)
    # Create directory for web content.
    www_content.mkdir(parents=True, exist_ok=True)
    # Copy static HTML/CSS/JS.
    html_source = Path.cwd() / "html"
    shutil.copytree(html_source, www_content, dirs_exist_ok=True)
    # Create directory for plots.
    plot_dir = www_content / "plots"
    plot_dir.mkdir(exist_ok=True)

    logger.info("Linking %s to web content.", www_root_link)
    # Ensure parent directories of WEB_DIR exist.
    www_root_link.parent.mkdir(parents=True, exist_ok=True)
    # Create symbolic link to web directory.
    # NOTE: While good for space, it means `cylc clean` removes output.
    www_root_link.symlink_to(www_content)


def construct_index(www_content: Path):
    """Construct the plot index."""
    plots_dir = www_content / "plots"
    with open(plots_dir / "index.jsonl", "wt", encoding="UTF-8") as index_fp:
        # Loop over all diagnostics and append to index. The glob is sorted to
        # ensure a consistent ordering.
        for metadata_file in sorted(plots_dir.glob("**/*/meta.json")):
            try:
                with open(metadata_file, "rt", encoding="UTF-8") as plot_fp:
                    plot_metadata = json.load(plot_fp)
                plot_metadata["path"] = str(metadata_file.parent.relative_to(plots_dir))
                # Remove keys that are not useful for the index.
                plot_metadata.pop("description", None)
                plot_metadata.pop("plots", None)
                # Sort plot metadata.
                plot_metadata = sort_dict(plot_metadata)
                # Write metadata into website index.
                json.dump(plot_metadata, index_fp, separators=(",", ":"))
                index_fp.write("\n")
            except (json.JSONDecodeError, KeyError, TypeError) as err:
                logging.error("%s is invalid, skipping.\n%s", metadata_file, err)
                continue


def bust_cache(www_content: Path):
    """Add a unique query string to static requests to avoid stale caches.

    We only need to do this for static resources referenced from the index
    page, as each plot already uses a unique filename based on the recipe.
    """
    # Search and replace the string "CACHEBUSTER".
    CACHEBUSTER = str(int(time.time()))
    with open(www_content / "index.html", "r+t") as fp:
        content = fp.read()
        new_content = content.replace("CACHEBUSTER", CACHEBUSTER)
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)

    # Move plots directory so it has a unique name.
    os.rename(www_content / "plots", www_content / f"plots-{CACHEBUSTER}")


def update_workflow_status(www_content: Path):
    """Update the workflow status on the front page of the web interface."""
    with open(www_content / "placeholder.html", "r+t") as fp:
        content = fp.read()
        finish_time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        status = f"Completed at {finish_time} using CSET v{version('CSET')}"
        new_content = content.replace(
            '<p id="workflow-status">Unknown</p>',
            f'<p id="workflow-status">{status}</p>',
        )
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)


def copy_rose_config(www_content: Path):
    """Copy the rose-suite.conf file to add to output web directory."""
    rose_suite_conf = Path(os.environ["CYLC_WORKFLOW_RUN_DIR"]) / "rose-suite.conf"
    web_conf_file = www_content / "rose-suite.conf"
    shutil.copyfile(rose_suite_conf, web_conf_file)


def run():
    """Do the final steps to finish the website."""
    # Strip trailing slashes in case they have been added in the config.
    # Otherwise they break the symlinks.
    www_root_link = Path(os.environ["WEB_DIR"].rstrip("/"))
    www_content = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"] + "/web")

    install_website_skeleton(www_root_link, www_content)
    copy_rose_config(www_content)
    construct_index(www_content)
    bust_cache(www_content)
    update_workflow_status(www_content)


if __name__ == "__main__":  # pragma: no cover
    run()
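
For reference, run() only depends on the three environment variables read above (WEB_DIR, CYLC_WORKFLOW_SHARE_DIR, CYLC_WORKFLOW_RUN_DIR), plus an "html" directory of static files in the current working directory. Below is a minimal sketch of driving it outside the workflow; the paths and the finish_website import name are hypothetical, as in normal use Cylc exports these variables for the finish_website task.

# Minimal sketch, assuming the script is importable as finish_website and
# that the hypothetical paths below exist; illustrative only.
import os

import finish_website

os.environ["WEB_DIR"] = "/tmp/example-www/cset"  # hypothetical symlink location
os.environ["CYLC_WORKFLOW_SHARE_DIR"] = "/tmp/example-share"  # hypothetical share dir
os.environ["CYLC_WORKFLOW_RUN_DIR"] = "/tmp/example-run"  # must contain rose-suite.conf

# Expects ./html with the static site, and meta.json files under the created
# web/plots directory for the index to pick up.
finish_website.run()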