_targets.R
# Created by use_targets().
# Follow the comments below to fill in this target script.
# Then follow the manual to check and run the pipeline:
#   https://books.ropensci.org/targets/walkthrough.html#inspect-the-pipeline

# Load packages required to define the pipeline:
library(targets)
library(tarchetypes) # Load other packages as needed.

# Set target options:
tar_option_set(
  packages = unique(renv::dependencies(quiet = TRUE)$Package) # packages that your targets need to run
  # format = "qs", # Optionally set the default storage format. qs is fast.
  #
  # For distributed computing in tar_make(), supply a {crew} controller
  # as discussed at https://books.ropensci.org/targets/crew.html.
  # Choose a controller that suits your needs. For example, the following
  # sets a controller with 2 workers which will run as local R processes:
  #
  #   controller = crew::crew_controller_local(workers = 2)
  #
  # Alternatively, if you want workers to run on a high-performance computing
  # cluster, select a controller from the {crew.cluster} package. The following
  # example is a controller for Sun Grid Engine (SGE).
  #
  #   controller = crew.cluster::crew_controller_sge(
  #     workers = 50,
  #     # Many clusters install R as an environment module, and you can load it
  #     # with the script_lines argument. To select a specific version of R,
  #     # you may need to include a version string, e.g. "module load R/4.3.0".
  #     # Check with your system administrator if you are unsure.
  #     script_lines = "module load R"
  #   )
  #
  # Set other options as needed.
)
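
# Note on the `packages` option above: renv::dependencies() scans the project
# and returns a data frame with a `Package` column, so every package the code
# references is made available to the targets. An alternative (shown only as
# an illustration; these package names are placeholders) is to list them
# explicitly:
#
#   tar_option_set(packages = c("dplyr", "ggplot2", "readr"))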

# tar_make_clustermq() is an older (pre-{crew}) way to do distributed computing
# in {targets}, and its configuration for your machine is below.
options(clustermq.scheduler = "multicore")

# tar_make_future() is an older (pre-{crew}) way to do distributed computing
# in {targets}, and its configuration for your machine is below.
# Install packages {future}, {future.callr}, and {future.batchtools} to allow
# use_targets() to configure tar_make_future() options.

# Run the R scripts in the R/ folder with your custom functions:
tar_source()
# source("other_functions.R") # Source other scripts as needed.

# Replace the target list below with your own:
list(
  # Track the raw data file so downstream targets rerun when it changes:
  tar_target(
    name = file,
    command = "data/lipidomics.csv",
    format = "file"
  ),
  # Import the lipidomics dataset:
  tar_target(
    name = lipidomics,
    command = readr::read_csv(file, show_col_types = FALSE)
  ),
  # Calculate descriptive statistics of each metabolite:
  tar_target(
    name = df_stats_by_metabolite,
    command = descriptive_stats(lipidomics)
  ),
  # Plot the distribution of each metabolite:
  tar_target(
    name = fig_metabolite_distribution,
    command = plot_distributions(lipidomics)
  ),
  # Render the Quarto website in the project root:
  tar_quarto(
    name = quarto_website,
    path = "."
  ),
  # Calculate the model estimates:
  tar_target(
    name = df_model_estimates,
    command = calculate_estimates(lipidomics)
  ),
  # Plot the model estimates:
  tar_target(
    name = fig_model_estimates,
    command = plot_estimates(df_model_estimates)
  )
)
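
# Typical usage once this script is set up (run these in the R console, not in
# this file):
#
#   targets::tar_visnetwork() # Inspect the dependency graph of the pipeline.
#   targets::tar_make()       # Run the pipeline.
#   targets::tar_read(df_stats_by_metabolite) # Read a completed target.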