import argparse
import os
from typing import Dict, Tuple, List
from watchdog.observers import Observer
from watchdog.events import FileSystemEvent, FileSystemEventHandler
import queue
from pylib.calculator_producer import calculator_producers
from pylib.editor_producer import editor_producers
from pylib.gz_compressor_producer import gz_compressor_producers
from pylib.imagepack import item_image_producers
from pylib.js_rollup_producer import js_rollup_producer
from pylib.landing_page_producer import landing_page_producers
from pylib.producer import Scheduler, SingleFile, GenericProducer, copy_file, copy_file_with_hash
from pylib.producer_plugins import plugins_producers
from pylib.typescript_producer import typescript_producer
from pylib.uglifyjs import uglify_js_producer
from pylib.yaml_linter_producer import resource_list_parser_producers

# CLI Argument Flags
# FLAG_skip_js_lint = False
# FLAG_skip_index = False
# FLAG_skip_gz_compression = False
# FLAG_skip_image_compress = False
# FLAG_force_image = False
# FLAG_skip_plugins = False
FLAG_skip_js_minify = False


################################################################################
# core_resource_producers
#
# Create the producer definitions for all of the core resources found in the
# `./core` folder. These are essentially static files that might go through
# a small amount of processing, such as minification or compilation, but are
# not dynamic, unlike most of the other files.
################################################################################
def core_resource_producers() -> List[GenericProducer]:
    # Files that should be copied out of the "core" folder with a content hash
    # added to their filenames.
    hashed_copyfiles = [
        "core/calculator.css",
        "core/add_game.png",
    ]
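    # For example, "core/calculator.css" is expected to land at a path like
    # "output/calculator-<hash>.css" (hash value illustrative), following the
    # output_file_template used below.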

    # Files that should be copied out of the "core" folder.
    copyfiles = [
        "core/logo.png",
        "core/.htaccess",
        "core/ads.txt",
        "core/favicon.ico",
    ]

    # Typescript Projects that should be compiled into javascript.
    ts_project_configs = [
        "core/src/tsconfig.json"
    ]

    # Javascript rollup targets, mapping each rollup entry file to the bundled
    # output file it should produce.
    js_rollup_targets = {
        "cache/calculatorjs/calculator.js": "cache/calculator.js"
    }

    # Javascript files that should be minified for production.
    minify_js_files = [
        "cache/calculator.js",
        "core/yaml_export.js",
    ]
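    # Each of these is expected to end up at output/<basename>, e.g.
    # "cache/calculator.js" becoming "output/calculator.js" (minified unless
    # --no-js-minify or --draft is set).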

    core_producers: List[GenericProducer] = []

    # Add a producer for each file that will be copied over to output/ with a
    # content hash inserted into its filename.
    for copyfile in hashed_copyfiles:
        core_producers.append(
            copy_file_with_hash(
                name=f"Hash Copy {copyfile}",
                input_file_pattern="^{}$".format(copyfile),
                output_file_template="output/{filename}-{filehash}{extension}",
                metadata_file_template="cache/{filename}{extension}.json",
            )
        )

    # Add a producer for each file that will be copied over to output/.
    for copyfile in copyfiles:
        core_producers.append(
            copy_file(
                name=f"Copy {copyfile}",
                target_file=copyfile,
                destination_file=os.path.join("output", os.path.basename(copyfile))
            )
        )

    # Add a producer for each of the typescript project files.
    for ts_project_config in ts_project_configs:
        core_producers += typescript_producer(ts_project_config)

    # Add a producer to roll up each javascript library into a single file.
    for js_target_file, js_destination_file in js_rollup_targets.items():
        core_producers += js_rollup_producer(js_target_file, js_destination_file)

    # Add a producer for each javascript file to minify.
    for minify_js_file in minify_js_files:
        input_file = minify_js_file
        output_file = os.path.join("output", os.path.basename(minify_js_file))

        if FLAG_skip_js_minify:
            core_producers.append(copy_file(
                name=f"Copy File {input_file}",
                target_file=input_file,
                destination_file=output_file
            ))
            continue

        core_producers.append(
            uglify_js_producer(
                input_file=input_file,
                output_file=output_file,
            )
        )

    return core_producers


################################################################################
# core_resource_paths
#
# The paths generator for all of the core resources that get copied over from
# the core/ folder to the output/ folder directly.
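#
# For example (illustrative), an input_files value of {"file": "core/logo.png"}
# maps to ({"file": "core/logo.png"}, {"file": "output/logo.png"}).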
################################################################################
def core_resource_paths(input_files: SingleFile, groups: Dict[str, str]) -> Tuple[SingleFile, SingleFile]:
    return (
        input_files,
        {
            "file": os.path.join("output", os.path.basename(input_files["file"]))
        }
    )


################################################################################
# main
#
# The main process for the build.py script. Handles argument parsing and
# starting up the generator process.
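#
# Example invocations (illustrative; <calculator_dir> stands in for the name of
# a calculator's resource directory):
#   python3 build.py                            Full build
#   python3 build.py --watch                    Rebuild whenever source files change
#   python3 build.py --draft <calculator_dir>   Fast dev build limited to one calculator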
################################################################################
def main() -> None:
    parser = argparse.ArgumentParser(
        description='Compile resourcecalculator.com html pages.'
    )
    parser.add_argument('limit_files', nargs='*', help="Speed up dev-builds by only building a specific set of one or more calculators")
    parser.add_argument('--watch', action='store_true', help="Watch source files and automatically rebuild when they change")
    parser.add_argument('--draft', action='store_true', help="Enable all speed up flags for dev builds")
    # # parser.add_argument('--no-jslint', action='store_true', help="Speed up dev-builds by skipping linting javascript files")
    parser.add_argument('--no-js-minify', action='store_true', help="Speed up dev-builds by skipping javascript compression")
    # parser.add_argument('--no-gz', action='store_true', help="Speed up dev-builds by skipping gz text compression")
    # parser.add_argument('--no-index', action='store_true', help="Speed up dev-builds by skipping building the index page")
    # parser.add_argument('--no-image-compress', action='store_true', help="Speed up dev-builds by skipping the image compression")
    # parser.add_argument('--no-plugins', action='store_true', help="Skip plugin publication to get only the plain calculators")
    # parser.add_argument('--force-html', action='store_true', help="Force the html pages to be rebuilt even if they are newer than their source files")
    # parser.add_argument('--force-image', action='store_true', help="Force images to be rebuilt even if they are newer than their source files")

    # global FLAG_skip_index
    # # global FLAG_skip_js_lint
    # global FLAG_skip_gz_compression
    # global FLAG_skip_image_compress
    # global FLAG_force_image
    # global FLAG_skip_plugins

    args = parser.parse_args()

    # if (args.watch):
    #     pass

    # # if args.no_jslint or args.draft:
    # #     FLAG_skip_js_lint = True

    if args.no_js_minify or args.draft:
        global FLAG_skip_js_minify
        FLAG_skip_js_minify = True

    # if args.no_gz or args.draft:
    #     FLAG_skip_gz_compression = True

    # if args.no_image_compress or args.draft:
    #     FLAG_skip_image_compress = True

    # if args.no_index or args.draft:
    #     FLAG_skip_index = True

    # if args.force_image:
    #     FLAG_force_image = True

    # if args.no_plugins or args.draft:
    #     FLAG_skip_plugins = True

    # calculator_page_sublist = []
    calculator_dir_regex = r"[a-z_ ]+"
    if len(args.limit_files) >= 1:
        calculator_page_sublist = args.limit_files
        calculator_dir_regex = "|".join(calculator_page_sublist)
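    # e.g. running `build.py gamea gameb` (illustrative directory names) narrows
    # the regex to "gamea|gameb" so only those calculators are rebuilt.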

    producers: List[GenericProducer] = []
    producers += core_resource_producers()
    producers += resource_list_parser_producers(calculator_dir_regex)
    producers += item_image_producers(calculator_dir_regex)
    producers += calculator_producers(calculator_dir_regex)
    producers += editor_producers(calculator_dir_regex)
    producers += landing_page_producers(calculator_dir_regex)
    producers += plugins_producers(calculator_dir_regex)
    producers += gz_compressor_producers()

    scheduler = Scheduler(
        producer_list=producers,
        initial_filepaths=Scheduler.all_paths_in_dir(
            base_dir=".",
            ignore_paths=["venv_docker", "venv", ".git", "node_modules", "output_master"]
        )
    )

    watch_directory = "."

    if args.watch:
        q: queue.Queue[Tuple[str, str]] = queue.Queue()
        observer = Observer()
        event_handler = Handler(q)
        observer.schedule(event_handler, watch_directory, recursive=True)  # type: ignore [no-untyped-call]
        observer.start()  # type: ignore [no-untyped-call]
        try:
            while True:
                event_type, src_path = q.get(True)
                # TODO: Use .get_nowait after a successful "get" so we can bundle
                # anything in the queue together into a single operation to the
                # scheduler objects, instead of sending each file one by one.
                if event_type == 'created' or event_type == 'modified':
                    scheduler.add_or_update_files([src_path])
                elif event_type == 'deleted':
                    # scheduler.delete_files([src_path])
                    pass
                elif event_type == 'closed':
                    # A file was closed, does not seem as useful as modified
                    pass
                else:
                    print("Unknown Event", event_type)
        except Exception:
            observer.stop()  # type: ignore [no-untyped-call]
            print("Observer Stopped")
            observer.join()


class Handler(FileSystemEventHandler):
    event_queue: queue.Queue[Tuple[str, str]]

    def __init__(self, event_queue: queue.Queue[Tuple[str, str]]):
        self.event_queue = event_queue

    def on_any_event(self, event: FileSystemEvent) -> None:
        if event.is_directory:
            return
        # Strip the leading "./" that watchdog includes when watching the
        # current directory before handing the path to the scheduler.
        self.event_queue.put((event.event_type, event.src_path[2:]))


PROFILE = False

if __name__ == "__main__":
    if PROFILE:
        import cProfile
        import pstats
        with cProfile.Profile() as pr:
            main()

        stats = pstats.Stats(pr)
        stats.sort_stats(pstats.SortKey.TIME)
        stats.dump_stats(filename="profiledata.prof")
        # snakeviz can be used to display the profile data: `snakeviz profiledata.prof`
    else:
        main()