Coverage for src/CSET/cset_workflow/app/finish_website/bin/finish_website.py: 100%

70 statements  

coverage.py v7.13.4, created at 2026-02-18 09:58 +0000

#!/usr/bin/env python3
# © Crown copyright, Met Office (2022-2025) and CSET contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Create the CSET diagnostic viewing website.

Copies the static files that make up the web interface, constructs the plot
index, and updates the workflow status on the front page of the web interface.
"""

import json
import logging
import os
import shutil
import sys
import time
from importlib.metadata import version
from pathlib import Path

from CSET._common import sort_dict

logging.basicConfig(
    level=os.getenv("LOGLEVEL", "INFO"),
    format="%(asctime)s %(levelname)s %(message)s",
    stream=sys.stdout,
)
logger = logging.getLogger(__name__)
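# With the logging configuration above, a line of output looks roughly like
# this (timestamp and message are illustrative, not from a real run):
#   2026-02-18 09:58:00,000 INFO Installing website files to .../share/web.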

def install_website_skeleton(www_root_link: Path, www_content: Path):
    """Copy static website files and create symlink from web document root."""
    # Remove existing link to output ahead of creating new symlink.
    logger.info("Removing any existing output link at %s.", www_root_link)
    www_root_link.unlink(missing_ok=True)

    logger.info("Installing website files to %s.", www_content)
    # Create directory for web content.
    www_content.mkdir(parents=True, exist_ok=True)
    # Copy static HTML/CSS/JS.
    html_source = Path.cwd() / "html"
    shutil.copytree(html_source, www_content, dirs_exist_ok=True)
    # Create directory for plots.
    plot_dir = www_content / "plots"
    plot_dir.mkdir(exist_ok=True)

    logger.info("Linking %s to web content.", www_root_link)
    # Ensure parent directories of WEB_DIR exist.
    www_root_link.parent.mkdir(parents=True, exist_ok=True)
    # Create symbolic link to web directory.
    # NOTE: While good for space, it means `cylc clean` removes output.
    www_root_link.symlink_to(www_content)
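# Illustrative layout once install_website_skeleton() has run (paths are
# hypothetical, not taken from a real run): WEB_DIR becomes a symlink into the
# workflow share directory, which holds the copied static files plus an empty
# plots/ directory.
#   ~/public_html/CSET -> ~/cylc-run/cset/share/web
#   ~/cylc-run/cset/share/web/  (index.html, other static assets, plots/)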

def construct_index(www_content: Path):
    """Construct the plot index."""
    plots_dir = www_content / "plots"
    with open(plots_dir / "index.jsonl", "wt", encoding="UTF-8") as index_fp:
        # Loop over all diagnostics and append to index. The glob is sorted to
        # ensure a consistent ordering.
        for metadata_file in sorted(plots_dir.glob("**/*/meta.json")):
            try:
                with open(metadata_file, "rt", encoding="UTF-8") as plot_fp:
                    plot_metadata = json.load(plot_fp)
                plot_metadata["path"] = str(metadata_file.parent.relative_to(plots_dir))
                # Remove keys that are not useful for the index.
                removed_index_keys = [
                    "description",
                    "plot_resolution",
                    "plots",
                    "skip_write",
                    "SUBAREA_EXTENT",
                    "SUBAREA_TYPE",
                ]
                for key in removed_index_keys:
                    plot_metadata.pop(key, None)
                # Sort plot metadata.
                plot_metadata = sort_dict(plot_metadata)
                # Write metadata into website index.
                json.dump(plot_metadata, index_fp, separators=(",", ":"))
                index_fp.write("\n")
            except (json.JSONDecodeError, KeyError, TypeError) as err:
                logging.error("%s is invalid, skipping.\n%s", metadata_file, err)
                continue
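# A sketch of one plots/index.jsonl line this produces (keys other than "path"
# come from each recipe's meta.json, so the names below are purely
# illustrative): compact JSON, one object per diagnostic.
#   {"path":"surface_air_temperature_spatial_plot","title":"Air temperature"}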

def bust_cache(www_content: Path):
    """Add a unique query string to static requests to avoid stale caches.

    We only need to do this for static resources referenced from the index page,
    as each plot already uses a unique filename based on the recipe.
    """
    # Search and replace the string "CACHEBUSTER".
    CACHEBUSTER = str(int(time.time()))
    with open(www_content / "index.html", "r+t") as fp:
        content = fp.read()
        new_content = content.replace("CACHEBUSTER", CACHEBUSTER)
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)

    # Move plots directory so it has a unique name.
    os.rename(www_content / "plots", www_content / f"plots-{CACHEBUSTER}")
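# How the substitution is assumed to work (the exact markup in index.html is
# not reproduced here): static references carry the literal token CACHEBUSTER,
# which is replaced with the run timestamp so browsers fetch fresh copies, e.g.
#   <script src="index.js?CACHEBUSTER"></script>
# becomes
#   <script src="index.js?1760000000"></script>
# Renaming plots/ to plots-<timestamp> serves the same purpose for plot data.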

def update_workflow_status(www_content: Path):
    """Update the workflow status on the front page of the web interface."""
    with open(www_content / "placeholder.html", "r+t") as fp:
        content = fp.read()
        finish_time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        status = f"Completed at {finish_time} using CSET v{version('CSET')}"
        new_content = content.replace(
            '<p id="workflow-status">Unknown</p>',
            f'<p id="workflow-status">{status}</p>',
        )
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)
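# Example of the substitution performed above, with an illustrative completion
# time and version number:
#   <p id="workflow-status">Unknown</p>
# becomes
#   <p id="workflow-status">Completed at 2026-02-18 10:03 using CSET v25.3.1</p>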

def copy_rose_config(www_content: Path):
    """Copy the rose-suite.conf file into the output web directory."""
    rose_suite_conf = Path(os.environ["CYLC_WORKFLOW_RUN_DIR"]) / "rose-suite.conf"
    web_conf_file = www_content / "rose-suite.conf"
    shutil.copyfile(rose_suite_conf, web_conf_file)

def run():
    """Do the final steps to finish the website."""
    # Strip trailing slashes in case they have been added in the config.
    # Otherwise they break the symlinks.
    www_root_link = Path(os.environ["WEB_DIR"].rstrip("/"))
    www_content = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"] + "/web")

    install_website_skeleton(www_root_link, www_content)
    copy_rose_config(www_content)
    construct_index(www_content)
    bust_cache(www_content)
    update_workflow_status(www_content)


if __name__ == "__main__":  # pragma: no cover
    run()
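# Example invocation outside of Cylc (the paths below are hypothetical; the
# three environment variables are the ones read above, and LOGLEVEL optionally
# raises the verbosity). It must be run from the app directory, since
# install_website_skeleton() resolves the html/ sources relative to the
# current working directory:
#   WEB_DIR=~/public_html/CSET \
#   CYLC_WORKFLOW_SHARE_DIR=~/cylc-run/cset/runN/share \
#   CYLC_WORKFLOW_RUN_DIR=~/cylc-run/cset/runN \
#   LOGLEVEL=DEBUG ./finish_website.py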