run_tracing.py.save · forked from PrincetonUniversity/BrainPipe · executable file · 157 lines (143 loc) · 7.03 KB
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 6 13:50:08 2016
@author: tpisano
"""
import os
import sys
import shutil
from xvfbwrapper import Xvfb
print(os.getcwd())
from tools.utils.directorydeterminer import directorydeterminer
from tools.registration.register import elastix_wrapper
from tools.imageprocessing import preprocessing
vdisplay = Xvfb()
vdisplay.start()
systemdirectory = directorydeterminer()
# systemdirectory = "/home/emilyjanedennis"
# set paths to data
# inputdictionary structure: key=pathtodata value=list["xx", "##"] where
# xx=regch, injch, or cellch and ##=two digit channel number
# "regch" = channel to be used for registration, assumption is all other
# channels are signal
# "cellch" = channel(s) to apply cell detection
# "injch" = channels(s) to quantify injection site
# e.g.: inputdictionary={path_1: [["regch", "00"]], path_2: [["cellch", "00"],
# ["injch", "01"]]} ###create this dictionary variable BEFORE params
inputdictionary = {
    # both the registration (00) and cell (01) channels come from the same
    # raw-data folder here, so they share a single dictionary key
    "/jukebox/LightSheetData/lightserv/ejdennis/2021_01_reimage_2female_brains/2021_01_reimage_2female_brains-001/imaging_request_1/rawdata/resolution_1.1x/210104_T107_1_1x_488_016na_1hfds_z10um_50msec_25povlp_15-19-01/":
        [["regch", "00"], ["cellch", "01"]]
}
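# Optional sanity check (a minimal sketch added for illustration, not part of
# the original pipeline): confirm every entry uses a recognized channel type
# and a two-digit channel string before the parameter dictionary is built.
for _path, _channels in inputdictionary.items():
    for _chtype, _chnum in _channels:
        assert _chtype in ("regch", "cellch", "injch"), "unknown channel type: %s" % _chtype
        assert len(_chnum) == 2 and _chnum.isdigit(), "channel must be a two-digit string: %s" % _chnum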
# Required inputs
params = {
    "systemdirectory": systemdirectory,  # don't need to touch
    "inputdictionary": inputdictionary,  # don't need to touch
    "outputdirectory": os.path.join(systemdirectory, "scratch/ejdennis/t107"),
    # (5.0,5.0,3), #micron/pixel: 5.0um/pix for 1.3x; 1.63um/pix for 4x
    "xyz_scale": (5.91, 5.91, 10),
    "tiling_overlap": 0.25,  # percent overlap taken during tiling
    "stitchingmethod": "terastitcher",  # "terastitcher" or "blending"
    # "AtlasFile": os.path.join(systemdirectory, "LightSheetData/brodyatlas/atlas/for_registration_to_lightsheet/WHS_SD_rat_T2star_v1.01_atlas.tif"),
    # path to annotation file for structures
    # "annotationfile": os.path.join(systemdirectory, "LightSheetData/brodyatlas/atlas/for_registration_to_lightsheet/WHS_SD_rat_atlas_v3_annotation.tif"),
    "AtlasFile": "/jukebox/LightSheetData/brodyatlas/atlas/for_registration_to_lightsheet/WHS_SD_rat_T2star_v1.01_atlas.tif",
    "annotationfile": "/jukebox/LightSheetData/brodyatlas/atlas/for_registration_to_lightsheet/WHS_SD_rat_atlas_v3_annotation.tif",
    "blendtype": "sigmoidal",  # False/None, "linear", or "sigmoidal"
    # blending between tiles, usually sigmoidal;
    # False or None for images where blending would be detrimental
    # True = calculate mean intensity of overlap between tiles and shift the
    # higher of the two towards the lower - useful for images where relative
    # intensity is not important (i.e. tracing=True, cFOS=False)
    "intensitycorrection": True,
    "resizefactor": 3,  # in x and y; normally set to 5 for the 4x objective,
    # 3 for the 1.3x objective
    "rawdata": True,  # set to True if raw data is taken from the scope and
    # images need to be flattened; functionality for
    # rawdata=False has not been tested
    # Used to account for different orientation between brain and atlas.
    # Assumes XYZ ("0", "1", "2") orientation.
    # Pass strings NOT ints. "-0" = reverse the order of the x axis.
    # For a better description see the docstring of
    # tools.imageprocessing.orientation.fix_orientation;
    # ("2", "1", "0") for horizontal to sagittal.
    # Order of operations is reversing of axes BEFORE swapping axes
    # (see the small illustration after this params block).
    "finalorientation": ("2", "1", "0"),
    "slurmjobfactor": 50,
    # number of array iterations per array job
    # since the max job array on SPOCK is 1000
    "transfertype": "copy",
    # for rat brains that need different registration params
    "parameterfolder": os.path.join(systemdirectory, "LightSheetData/brodyatlas/atlas/for_registration_to_lightsheet/rat_registration_parameter_folder")
}
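# Illustration only (a minimal sketch, assuming numpy is importable; the real
# remapping is done by tools.imageprocessing.orientation.fix_orientation):
# finalorientation ("2", "1", "0") takes the output's first axis from the
# input's axis 2, the second from axis 1, and the third from axis 0, i.e. a
# plain z/x swap with no reversed axes, roughly a numpy transpose:
import numpy as np
_demo = np.zeros((3, 4, 5))  # stand-in (z, y, x) stack
assert np.transpose(_demo, (2, 1, 0)).shape == (5, 4, 3)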
print("outputdirectory")
# stitchingmethod
# "terastitcher": computationally determine overlap.
# See the .py file and http://abria.github.io/TeraStitcher/ for details.
# NOTE: THIS REQUIRES COMPILED SOFTWARE.
# If testing terastitcher, I strongly suggest setting transfertype="copy" in
# the parameter dictionary; despite doubling the data size, this protects the
# original data while testing.
# "blending": uses percent overlap to determine pixel overlap,
# then merges using blendtype, intensitycorrection, blendfactor.
# This is not a smart algorithm.
# additional optional params
# "parameterfolder" :
# "atlas_scale": (25, 25, 25), #micron/pixel, ABA is likely (25,25,25)
# "swapaxes" : (0,2),
# "maskatlas": {"x": all, "y": "125:202", "z": "75:125"};
# "cropatlas": {"x": all, "y": "125:202", "z": "75:125"};
# "blendfactor" :
# "bitdepth":
# "secondary_registration"
# run script portions
if __name__ == "__main__":
    # get jobids from SLURM or argv
    print(sys.argv)
    stepid = int(sys.argv[1])
    if systemdirectory != "/home/emilyjanedennis/":
        print(os.environ["SLURM_ARRAY_TASK_ID"])
        jobid = int(os.environ["SLURM_ARRAY_TASK_ID"])
    else:
        jobid = int(sys.argv[2])
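    # Example invocations (assumed; this simply restates the argument parsing
    # above): on the cluster the script runs inside a SLURM array job and only
    # stepid is passed, e.g. "python run_tracing.py 1"; locally the jobid is
    # given explicitly as the second argument, e.g. "python run_tracing.py 1 3".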
    # Make parameter dictionary and setup destination
    if stepid == 0:
        # make parameter dictionary and pickle file:
        # e.g. single job assuming directory_determiner function has
        # been properly set
        preprocessing.generateparamdict(os.getcwd(), **params)
        # preprocessing.updateparams("/", svnm="param_dict_local.p", **params)
        # make a local copy of the run folder in the output to save run info
        if not os.path.exists(os.path.join(params["outputdirectory"],
                                           "lightsheet")):
            shutil.copytree(os.getcwd(),
                            os.path.join(params["outputdirectory"],
                                         "lightsheet"),
                            ignore=shutil.ignore_patterns(
                                ".pyc", "CVS", ".git", "tmp", ".svn",
                                "TeraStitcher-Qt4-standalone-1.10.11-Linux"))
    # Stitch and preprocess each z plane
    elif stepid == 1:
        if params["stitchingmethod"] not in ["terastitcher"]:
            # stitch based on percent overlap only ("dumb stitching")
            # and save files; showcelldetection=True saves out cell
            # contours overlaid on images.
            # processes zslice numbers equal to
            # slurmjobfactor*jobid thru (jobid+1)*slurmjobfactor
            print("not terastitcher")
            preprocessing.arrayjob(jobid, cores=6, compression=1, **params)
        else:
            # Stitch using Terastitcher "smart stitching"
            from tools.imageprocessing.stitch import terastitcher_from_params
            terastitcher_from_params(jobid=jobid, cores=6, **params)
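    # Worked example for the slurmjobfactor comment above (assuming the upper
    # bound is exclusive): with slurmjobfactor = 50, jobid = 3 covers z planes
    # 150 through 199.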
    # Consolidate for Registration
    elif stepid == 2:
        # combine downsized ch and ch+cell files
        preprocessing.tiffcombiner(jobid, cores=10, **params)
    elif stepid == 3:
        elastix_wrapper(jobid, cores=12, **params)  # run elastix
    vdisplay.stop()