Coverage for src / CSET / cset_workflow / app / finish_website / bin / finish_website.py: 100%

70 statements  

« prev     ^ index     » next       coverage.py v7.13.1, created at 2026-01-12 09:16 +0000

1#!/usr/bin/env python3 

2# © Crown copyright, Met Office (2022-2025) and CSET contributors. 

3# 

4# Licensed under the Apache License, Version 2.0 (the "License"); 

5# you may not use this file except in compliance with the License. 

6# You may obtain a copy of the License at 

7# 

8# http://www.apache.org/licenses/LICENSE-2.0 

9# 

10# Unless required by applicable law or agreed to in writing, software 

11# distributed under the License is distributed on an "AS IS" BASIS, 

12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

13# See the License for the specific language governing permissions and 

14# limitations under the License. 

15 

16""" 

17Create the CSET diagnostic viewing website. 

18 

19Copies the static files that make up the web interface, constructs the plot 

20index, and updates the workflow status on the front page of the 

21web interface. 

22""" 

23 

24import json 

25import logging 

26import os 

27import shutil 

28import time 

29from importlib.metadata import version 

30from pathlib import Path 

31 

32from CSET._common import combine_dicts, sort_dict 

33 

# Configure the root logger once at import time. The LOGLEVEL environment
# variable (e.g. "DEBUG") overrides the default of INFO.
logging.basicConfig(
    level=os.getenv("LOGLEVEL", "INFO"), format="%(asctime)s %(levelname)s %(message)s"
)
# Module-level logger used by the functions below.
logger = logging.getLogger(__name__)

38 

39 

def install_website_skeleton(www_root_link: Path, www_content: Path):
    """Copy static website files and create symlink from web document root."""
    # Drop any stale link before recreating it below.
    logger.info("Removing any existing output link at %s.", www_root_link)
    www_root_link.unlink(missing_ok=True)

    logger.info("Installing website files to %s.", www_content)
    # Web content directory, then the static HTML/CSS/JS copied into it.
    www_content.mkdir(parents=True, exist_ok=True)
    shutil.copytree(Path.cwd() / "html", www_content, dirs_exist_ok=True)
    # Plots live in a dedicated subdirectory of the web content.
    (www_content / "plots").mkdir(exist_ok=True)

    logger.info("Linking %s to web content.", www_root_link)
    # Make sure the directories above the link location exist before linking.
    www_root_link.parent.mkdir(parents=True, exist_ok=True)
    # Symbolic link rather than a copy of the web directory.
    # NOTE: While good for space, it means `cylc clean` removes output.
    www_root_link.symlink_to(www_content)

62 

63 

def construct_index(www_content: Path):
    """Construct the plot index.

    Walks the plots directory for ``meta.json`` files, merging each
    diagnostic into a nested index of category -> case date -> plot URL,
    then writes the sorted result to ``plots/index.json``.
    """
    plots_dir = www_content / "plots"
    index = {}
    # Loop over all diagnostics and append to index.
    for metadata_file in plots_dir.glob("**/*/meta.json"):
        try:
            with open(metadata_file, "rt", encoding="UTF-8") as fp:
                plot_metadata = json.load(fp)

            category = plot_metadata["category"]
            case_date = plot_metadata.get("case_date", "")
            # URL of the plot directory, relative to the plots root.
            relative_url = str(metadata_file.parent.relative_to(plots_dir))

            record = {
                category: {
                    case_date if case_date else "Aggregation": {
                        relative_url: plot_metadata["title"].strip()
                    }
                }
            }
        except (json.JSONDecodeError, KeyError, TypeError) as err:
            # Use the module logger (not the root logger via logging.error) so
            # messages go through the same configured format and level as the
            # rest of this module.
            logger.error("%s is invalid, skipping.\n%s", metadata_file, err)
            continue
        index = combine_dicts(index, record)

    # Sort index of diagnostics.
    index = sort_dict(index)

    # Write out website index.
    with open(plots_dir / "index.json", "wt", encoding="UTF-8") as fp:
        json.dump(index, fp, indent=2)

96 

97 

def bust_cache(www_content: Path):
    """Add a unique query string to static requests to avoid stale caches.

    We only need to do this for static resources referenced from the index page,
    as each plot already uses a unique filename based on the recipe.
    """
    # Search and replace the string "CACHEBUSTER" with the current unix time.
    CACHEBUSTER = str(int(time.time()))
    # Explicit encoding so the rewrite does not depend on the locale,
    # matching the other file accesses in this module.
    with open(www_content / "index.html", "r+t", encoding="UTF-8") as fp:
        content = fp.read()
        new_content = content.replace("CACHEBUSTER", CACHEBUSTER)
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)

    # Move plots directory so it has a unique name. Use pathlib for
    # consistency with the rest of this module.
    (www_content / "plots").rename(www_content / f"plots-{CACHEBUSTER}")

115 

116 

def update_workflow_status(www_content: Path):
    """Update the workflow status on the front page of the web interface.

    Replaces the "Unknown" status placeholder in placeholder.html with the
    completion time and the installed CSET version.
    """
    # Explicit encoding so the rewrite does not depend on the locale,
    # matching the other file accesses in this module.
    with open(www_content / "placeholder.html", "r+t", encoding="UTF-8") as fp:
        content = fp.read()
        finish_time = time.strftime("%Y-%m-%d %H:%M", time.localtime())
        status = f"Completed at {finish_time} using CSET v{version('CSET')}"
        new_content = content.replace(
            '<p id="workflow-status">Unknown</p>',
            f'<p id="workflow-status">{status}</p>',
        )
        fp.seek(0)
        fp.truncate()
        fp.write(new_content)

130 

131 

def copy_rose_config(www_content: Path):
    """Copy the rose-suite.conf file to add to output web directory."""
    # Workflow run directory comes from the Cylc-provided environment.
    run_dir = Path(os.environ["CYLC_WORKFLOW_RUN_DIR"])
    shutil.copyfile(run_dir / "rose-suite.conf", www_content / "rose-suite.conf")

137 

138 

def run():
    """Do the final steps to finish the website."""
    # Trailing slashes added in the config would break the symlink target,
    # so strip them before building the path.
    root_link = Path(os.environ["WEB_DIR"].rstrip("/"))
    content_dir = Path(os.environ["CYLC_WORKFLOW_SHARE_DIR"] + "/web")

    install_website_skeleton(root_link, content_dir)
    copy_rose_config(content_dir)
    construct_index(content_dir)
    bust_cache(content_dir)
    update_workflow_status(content_dir)

151 

152 

# Entry point when executed directly as a workflow task script.
if __name__ == "__main__":  # pragma: no cover
    run()