-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdata_processing_v2.sh
More file actions
580 lines (476 loc) · 24.8 KB
/
data_processing_v2.sh
File metadata and controls
580 lines (476 loc) · 24.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
#!/usr/bin/env bash
# --- ensure we're using bash, not sh ---
# BUG FIX: this guard must run BEFORE any bashism. The original ran
# `set -Eeuo pipefail` and `[[ ]]` first, so a POSIX sh died with a cryptic
# syntax/option error instead of this message. POSIX `[` is used on purpose.
if [ -z "${BASH_VERSION:-}" ]; then
  echo "Please run this script with bash (not sh)." >&2
  exit 1
fi
set -Eeuo pipefail
set -o errtrace
# file:line:function prefix for `set -x` traces
PS4='+ ${BASH_SOURCE}:${LINENO}:${FUNCNAME[0]}: '
trap 'code=$?; echo "ERROR: command \"${BASH_COMMAND}\" exited $code at ${BASH_SOURCE[0]}:${LINENO}"; exit $code' ERR
# need: abort with 127 unless the named command is on PATH.
need() { command -v "$1" >/dev/null 2>&1 || { echo "ERROR: '$1' not found in PATH." >&2; exit 127; }; }
need curl; need awk; need python3; command -v column >/dev/null 2>&1 || true  # column is optional (pretty-print only)
need mktemp
# Nice glob behaviour for optional files like Static_SCM*.nii.gz
shopt -s nullglob
# --- Expand user paths like ~, $HOME, relative → absolute (safe; no eval) ---
# Prints the absolute form of $1 on stdout; an empty argument resolves to
# the current directory. Expansion order: $VARS first, then ~, then abspath.
expand_path() {
  python3 -c 'import os, sys
arg = sys.argv[1] if len(sys.argv) > 1 else ""
print(os.path.abspath(os.path.expanduser(os.path.expandvars(arg or "."))))' "$1"
}
# --- Portable readline prompt with default (works on macOS Bash 3.2) ---
# Usage: read_default "Prompt text" "DEFAULT" varname
# When the running bash's `read` supports -i, the default is pre-filled on the
# input line; otherwise it is shown in the prompt and substituted on empty
# input. The answer (or the default) is stored into varname.
read_default() {
  local question="$1" fallback="$2" target="$3"
  local supports_prefill=0
  help read 2>/dev/null | grep -q ' -i ' && supports_prefill=1
  if (( supports_prefill )); then
    read -e -r -p "${question} [${fallback}]: " -i "${fallback}" REPLY || true
  else
    read -e -r -p "${question} [${fallback}]: " REPLY || true
  fi
  printf -v "${target}" '%s' "${REPLY:-${fallback}}"
}
# --- Robust GitHub sourcing helper (Bash files only) ---
# Tries each known repo subdirectory until one returns HTTP 200 with a
# non-empty body, sources that file into the current shell, and reports which
# path worked. Exits 2 when no candidate path resolves.
gh_source() {
  local fname="$1"
  local repo_base="https://raw.githubusercontent.com/njainmpi/fMRI_analysis_pipeline/main"
  local try_paths=(
    "$fname"
    "individual_project_based_scripts/$fname"
    "toolbox/$fname"
    "scripts/$fname"
  )
  local url status tmp p
  # SECURITY FIX: the original used the predictable name /tmp/ghsrc.$$ —
  # race/symlink-prone. mktemp gives an unpredictable, exclusively-created file.
  tmp="$(mktemp)" || { echo "ERROR: mktemp failed" >&2; exit 2; }
  for p in "${try_paths[@]}"; do
    url="$repo_base/$p"
    # -w '%{http_code}' puts the status on stdout; body goes to $tmp.
    status="$(curl -sS -L -w '%{http_code}' -o "$tmp" "$url" || echo 000)"
    if [[ "$status" == "200" && -s "$tmp" ]]; then
      # shellcheck source=/dev/null
      source "$tmp"
      rm -f "$tmp"
      echo "Sourced: $p"
      return 0
    fi
  done
  rm -f "$tmp"  # BUG FIX: original leaked the temp file on the failure path
  echo "ERROR: Could not fetch '$fname' from any known path. Tried: ${try_paths[*]}" >&2
  exit 2
}
# --- Run a Python file from local or GitHub ---
# Usage: gh_py_exec make_static_maps_and_movie.py --args...
# Prefers ./<fname> in the current directory; otherwise downloads from the
# known repo paths. Returns python's exit status, or 2 if the file cannot be
# fetched.
gh_py_exec() {
  local fname="$1"; shift || true
  local local_py="./$fname"
  if [[ -f "$local_py" ]]; then
    echo "Running local Python: $local_py $*"
    # BUG FIX: `python3 …; return $?` never ran on failure — with `set -e`
    # active, errexit aborted the script before the status was captured.
    local rc=0
    python3 "$local_py" "$@" || rc=$?
    return "$rc"
  fi
  local repo_base="https://raw.githubusercontent.com/njainmpi/amplify/main"
  local try_paths=(
    "$fname"
    "individual_project_based_scripts/$fname"
    "toolbox/$fname"
    "scripts/$fname"
  )
  local url tmp status p rc
  tmp="$(mktemp)" || { echo "ERROR: mktemp failed" >&2; return 2; }
  # Give the temp file a .py suffix (kept from the original; presumably for
  # tools/tracebacks that expect one — TODO confirm it is actually required).
  mv "$tmp" "${tmp}.py"; tmp="${tmp}.py"
  for p in "${try_paths[@]}"; do
    url="$repo_base/$p"
    status="$(curl -sS -L -w '%{http_code}' -o "$tmp" "$url" || echo 000)"
    if [[ "$status" == "200" && -s "$tmp" ]]; then
      echo "Running Python from GitHub: $p $*"
      # BUG FIX: same errexit problem as above — `python3 …; local rc=$?`
      # left rc unset on failure; capture the status explicitly instead.
      rc=0
      python3 "$tmp" "$@" || rc=$?
      rm -f "$tmp"
      return "$rc"
    fi
  done
  rm -f "$tmp"
  echo "ERROR: Could not fetch Python '$fname' from known paths." >&2
  return 2
}
# --- Source all helpers (your list) ---
# Each call fetches the named file from the pipeline repo and sources it into
# this shell; gh_source exits the script (code 2) if any file cannot be found,
# so everything below may assume these helper functions exist.
gh_source toolbox_name.sh
gh_source log_execution.sh
gh_source missing_run.sh
gh_source folder_existence_function.sh
gh_source func_parameters_extraction.sh
gh_source temporal_smoothing.sh
gh_source check_spikes.sh
gh_source coregistration.sh
gh_source data_conversion.sh
gh_source motion_correction.sh
gh_source quality_check.sh
gh_source signal_change_map.sh
gh_source smoothing_using_fsl.sh
gh_source temporal_snr_using_afni.sh
gh_source temporal_snr_using_fsl.sh
gh_source scm_visual.sh
gh_source print_function.sh
gh_source static_map.sh
gh_source moving_results.sh
# NOTE(review): "coregsitered" is misspelled but matches the actual file name
# in the repo — do not "fix" it here without renaming the remote file too.
gh_source scm_from_coregsitered_functional_v1.sh
# Python is executed via gh_py_exec (not sourced)
# --- Color print fallbacks (if helper didn't provide them) ---
if ! declare -F PRINT_CYAN >/dev/null; then PRINT_CYAN() { printf "\033[36m%s\033[0m\n" "$*"; }; fi
if ! declare -F PRINT_YELLOW >/dev/null; then PRINT_YELLOW() { printf "\033[33m%s\033[0m\n" "$*"; }; fi
if ! declare -F PRINT_RED >/dev/null; then PRINT_RED() { printf "\033[31m%s\033[0m\n" "$*"; }; fi
# BUG FIX: PRINT_GREEN is used later (coregistration status messages) but had
# no fallback, so the script died with "command not found" whenever the
# sourced print helper failed to define it.
if ! declare -F PRINT_GREEN >/dev/null; then PRINT_GREEN() { printf "\033[32m%s\033[0m\n" "$*"; }; fi
# --- resolve script dir for local files like path_definition.txt ---
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# NOTE(review): SCRIPT_DIR and identity are not referenced later in this file;
# presumably read by the sourced helpers (e.g. logging) — confirm.
identity="$(whoami)@$(hostname)"
default_root="/Volumes/Extreme_Pro/fMRI"
# ---- prompt/CLI for root_location ----
# Accepts either a bare positional path, `--root PATH`, or (when no argument
# was given) an interactive prompt pre-filled with the default.
root_location="${1:-}"
if [[ "${root_location:-}" == "--root" ]]; then
shift
root_location="${1:-}"
shift || true
fi
if [[ -z "${root_location:-}" ]]; then
echo
read_default "Root location" "${default_root}" root_location_input
root_location="${root_location_input}"
fi
# Expand ~, $VARS, and relative paths → absolute
root_location="$(expand_path "$root_location")"
if [[ ! -d "$root_location" ]]; then
echo "ERROR: root location '$root_location' does not exist." >&2
exit 1
fi
echo "Using root location: $root_location"
# ---- CSV config (use absolute path!) ----
csv="Animal_Experiments_Sequences_v1.csv"
csv_path="$root_location/RawData/$csv"
# The first two physical lines of the CSV are header rows; line selection
# below only accepts line numbers greater than this.
header_lines=2
# ASCII Unit Separator (0x1f) — a field delimiter that cannot appear in
# normal CSV text, used to pass parsed rows between python and the shell.
DELIM=$'\x1f'
if [[ ! -f "$csv_path" ]]; then
echo "ERROR: $csv not found at $csv_path"
exit 1
fi
# ---- show available rows (first 8 columns) ----
# Streams the CSV through python (byte-wise, with encoding fallbacks) and
# pretty-prints line number + first 8 columns; `column` is optional, `cat`
# is the fallback formatter.
echo -e "\n== Available rows (CSV line | first 8 columns) =="
python3 - "$csv_path" "$header_lines" <<'PY_AVAIL' | { column -t -s'|' || cat; }
import csv, sys
path = sys.argv[1]; skip_n = int(sys.argv[2])
def try_decode(b):
    # Encoding fallbacks for spreadsheet exports: BOM'd UTF-8, Windows-1252,
    # Latin-1, then lossy UTF-8 as last resort.
    for enc in ('utf-8-sig','cp1252','latin-1'):
        try: return b.decode(enc)
        except UnicodeDecodeError: continue
    return b.decode('utf-8', errors='replace')
def clean(s): return (s or '').replace('|','¦').strip()  # '|' is our table delimiter
print("Line|Col1|Col2|Col3|Col4|Col5|Col6|Col7|Col8")
with open(path,'rb') as f:
    for lineno, raw in enumerate(f, start=1):
        if lineno <= skip_n: continue
        if not raw.strip(): continue
        row = next(csv.reader([try_decode(raw)]))
        row = [clean(c) for c in row]
        first8 = (row + [""]*8)[:8]
        print("|".join([str(lineno)] + first8))
PY_AVAIL
# ---- ask for multiple line numbers / ranges ----
echo
echo "You can choose multiple CSV lines, e.g.: 5,7,10-12"
read -rp "Enter CSV LINE NUMBERS (or q to quit): " sel
case "$sel" in q|Q) echo "Aborted."; exit 0 ;; esac
[[ -n "$sel" ]] || { echo "No selection."; exit 1; }
# ---- expand comma/range list to a unique, sorted list of integers > header_lines ----
# Input:  comma-separated tokens, each "N" or "A-B" (reversed ranges allowed).
# Output: one line number per line, sorted, de-duplicated, header rows dropped.
# Returns 1 (emitting nothing) on any invalid token.
# Reads global: header_lines
expand_lines() {
  local input="$1" part a b n
  local -a parts
  IFS=',' read -r -a parts <<< "$input"
  # BUG FIX: the original `exit 1` sat inside the for-loop that was piped into
  # awk, i.e. inside a pipeline subshell — it never aborted anything, and
  # partial output produced before the bad token still flowed downstream.
  # Pass 1: validate every token BEFORE emitting a single line.
  for part in "${parts[@]}"; do
    part="${part//[[:space:]]/}"
    if ! [[ "$part" =~ ^[0-9]+(-[0-9]+)?$ ]]; then
      echo "ERROR: invalid token '$part' in selection." >&2
      return 1
    fi
  done
  # Pass 2: expand ranges and filter/sort. All tokens are known-valid here.
  for part in "${parts[@]}"; do
    part="${part//[[:space:]]/}"
    if [[ "$part" == *-* ]]; then
      a="${part%-*}"; b="${part#*-}"
      if (( a > b )); then n="$a"; a="$b"; b="$n"; fi  # tolerate "12-10"
      for (( n=a; n<=b; n++ )); do printf '%s\n' "$n"; done
    else
      printf '%s\n' "$part"
    fi
  done | awk -v hdr="$header_lines" '($1>hdr){print $1}' | sort -n | uniq
}
# ---- macOS-friendly read into array (no mapfile on Bash 3.2) ----
# Bash >= 4 gets mapfile; the macOS stock Bash 3.2 gets an equivalent
# read loop. Process substitution keeps LINES in the current shell.
LINES=()
if [[ -n "${BASH_VERSINFO:-}" && "${BASH_VERSINFO[0]}" -ge 4 ]]; then
mapfile -t LINES < <(expand_lines "$sel")
else
while IFS= read -r ln; do
[[ -n "$ln" ]] && LINES+=("$ln")
done < <(expand_lines "$sel")
fi
# Abort when filtering left nothing (e.g. only header lines were selected).
((${#LINES[@]})) || { echo "No valid data lines selected (remember header lines are 1..$header_lines)."; exit 1; }
# ---- helpers ----
# True iff the argument is an optionally negative base-10 integer.
is_int() {
  local candidate="$1"
  [[ "$candidate" =~ ^-?[0-9]+$ ]]
}
# Prompt for a value only when the named variable is currently unset/empty.
# Usage: prompt_if_unset VARNAME "Prompt text" ["default"]
# A pre-populated variable (e.g. exported by the caller) is left untouched;
# with a default, empty input falls back to it.
prompt_if_unset() {
  local target="$1"; shift
  local question="$1"; shift || true
  local fallback="${1:-}"
  # Already set? Nothing to do.
  [[ -n "${!target:-}" ]] && return 0
  if [[ -n "$fallback" ]]; then
    read -rp "$question [$fallback]: " __ans
    printf -v "$target" "%s" "${__ans:-$fallback}"
  else
    read -rp "$question: " __ans
    printf -v "$target" "%s" "$__ans"
  fi
}
# ---- function: process one CSV line number (SANDBOXED in subshell) ----
# Runs the complete single-dataset pipeline for one CSV row: parse the row,
# locate the raw Bruker data, convert to NIfTI, motion-correct, mask, compute
# tSNR and static signal-change maps, coregister to structural, and extract
# ROI time courses. The entire body runs in a ( … ) subshell so cd/export and
# `exit` (used as "skip this line") cannot leak into the caller's loop.
# Depends on sourced helpers (FUNC_PARAM_EXTRACT, run_if_missing,
# BRUKER_to_NIFTI, MOTION_CORRECTION, smooth_movavg, TEMPORAL_SNR_using_FSL,
# scm_coregsitered_functional, move_results) and on AFNI/FSL tools on PATH.
# Reads globals: csv_path, root_location, DELIM, and the prompted volume
# indices base_start/base_end/sig_start/sig_end (via prompt_if_unset).
process_csv_line() {
local line_no="$1"
(
PRINT_CYAN "=== Processing CSV line $line_no ==="
# Re-read the CSV byte-wise and emit the target physical line as
# US (0x1f)-separated fields; blank/comma-only lines produce empty output.
local parsed
parsed="$(
python3 - "$csv_path" "$line_no" <<'PY_PARSE'
import csv, sys
US = '\x1f'; path = sys.argv[1]; target = int(sys.argv[2])
def try_decode(b):
    # Same encoding fallbacks as the listing step above.
    for enc in ('utf-8-sig','cp1252','latin-1'):
        try: return b.decode(enc)
        except UnicodeDecodeError: continue
    return b.decode('utf-8', errors='replace')
with open(path,'rb') as f:
    for lineno, raw in enumerate(f, start=1):
        if lineno == target:
            txt = try_decode(raw)
            if not raw.strip() or txt.strip(', \t\r\n') == '':
                sys.stdout.write(''); sys.exit(0)
            row = next(csv.reader([txt]))
            row = [(c or '').replace(US, ' ') for c in row]
            sys.stdout.write(US.join(row)); break
PY_PARSE
)"
# `exit 0` only leaves this subshell, i.e. skips this CSV line.
[[ -n "$parsed" ]] || { echo "Selected line $line_no is blank. Skipping."; exit 0; }
local project_name sub_project_name dataset_name structural_name functional_name struc_coregistration baseline_duration injection_duration
local injection_on_left_side injection_on_right_side
# Field accessor over the US-joined row; `trim` squeezes whitespace via xargs.
get_n_field(){ echo "$parsed" | cut -d"$DELIM" -f"$1"; }
trim(){ printf '%s' "$1" | xargs; }
# Column mapping below (3=project, 4=sub-project, 2=dataset, …) mirrors the
# spreadsheet layout — NOTE(review): verify against the current CSV schema.
project_name=$(trim "$(get_n_field 3)")
sub_project_name=$(trim "$(get_n_field 4)")
dataset_name=$(trim "$(get_n_field 2)")
structural_name=$(trim "$(get_n_field 5)")
functional_name=$(trim "$(get_n_field 6)")
struc_coregistration=$(trim "$(get_n_field 7)")
baseline_duration=$(trim "$(get_n_field 8)")
injection_duration=$(trim "$(get_n_field 9)")
injection_on_left_side=$(trim "$(get_n_field 22)")
injection_on_right_side=$(trim "$(get_n_field 23)")
# Exported for the sourced helper functions; exports die with this subshell.
export Project_Name="$project_name"
export Sub_project_Name="$sub_project_name"
export Dataset_Name="$dataset_name"
export structural_run="$structural_name"
export run_number="$functional_name"
export str_for_coreg="$struc_coregistration"
export baseline_duration_in_min="$baseline_duration"
export injection_duration_in_min="$injection_duration"
export injected_liquid_on_left_side="$injection_on_left_side"
export injected_liquid_on_right_side="$injection_on_right_side"
echo -e "\n== Selection summary =="
echo "CSV line: $line_no"
echo "Project_Name: $Project_Name"
echo "Sub_project_Name: $Sub_project_Name"
echo "Dataset_Name: $Dataset_Name"
echo "First Structural Run: $structural_run"
echo "Functional Run Number: $run_number"
echo "Structural Data used for Coregistration: $str_for_coreg"
echo "Baseline Duration (in min): $baseline_duration_in_min"
echo "Injection Duration (in min): $injection_duration_in_min"
echo "Liquid injected on Left Side: $injected_liquid_on_left_side"
echo "Liquid injected on Right Side: $injected_liquid_on_right_side"
local Path_Raw_Data="$root_location/RawData/$project_name/$sub_project_name"
local Path_Analysed_Data="$root_location/AnalysedData/$project_name/$sub_project_name/$Dataset_Name"
local datapath
# First directory whose name contains the dataset name wins.
datapath="$(find "$Path_Raw_Data" -type d -name "*${Dataset_Name}*" 2>/dev/null | head -n1 || true)"
[[ -n "${datapath:-}" ]] || { echo "ERROR: raw data dir for '$Dataset_Name' not found under $Path_Raw_Data"; exit 1; }
echo "Raw dataset path: $datapath"
echo
echo "Analysed Data folder check..."
if [[ -d "$Path_Analysed_Data" ]]; then
echo "Analysed Data folder exists, Proceeding to Analyse the data"
else
echo "Creating: $Path_Analysed_Data"
mkdir -p "$Path_Analysed_Data"
fi
# ---------------- STRUCTURAL ----------------
# FUNC_PARAM_EXTRACT presumably parses the Bruker 'method' file and sets
# SequenceName (and later MiddleVolume) — defined in the sourced
# func_parameters_extraction.sh; TODO confirm its full contract.
FUNC_PARAM_EXTRACT "$datapath/$structural_run"
: "${SequenceName:?FUNC_PARAM_EXTRACT did not set SequenceName}"
local struct_dir="$Path_Analysed_Data/${structural_run}${SequenceName}"
mkdir -p "$struct_dir"
cd "$struct_dir"
CHECK_FILE_EXISTENCE "$struct_dir" || true
# run_if_missing presumably skips the command when its target files exist.
run_if_missing "anatomy.nii.gz" -- BRUKER_to_NIFTI "$datapath" "$structural_run" "$datapath/$structural_run/method"
cp -f G1_cp.nii.gz anatomy.nii.gz || echo "WARNING: G1_cp.nii.gz not found for structural; continuing."
# 3dresample -orient LPI -inset G1_cp.nii.gz -prefix G1_cp.nii.gz -overwrite
# ---------------- STRUCTURAL FOR COREGISTRATION ----------------
FUNC_PARAM_EXTRACT "$datapath/$str_for_coreg"
: "${SequenceName:?FUNC_PARAM_EXTRACT did not set SequenceName}"
local struct_coreg_dir="$Path_Analysed_Data/${str_for_coreg}${SequenceName}"
mkdir -p "$struct_coreg_dir"
cd "$struct_coreg_dir"
CHECK_FILE_EXISTENCE "$struct_coreg_dir" || true
run_if_missing "anatomy.nii.gz" -- BRUKER_to_NIFTI "$datapath" "$str_for_coreg" "$datapath/$str_for_coreg/method"
cp -f G1_cp.nii.gz anatomy.nii.gz || echo "WARNING: G1_cp.nii.gz not found for coreg structural; continuing."
# 3dresample -orient LPI -inset G1_cp.nii.gz -prefix G1_cp.nii.gz -overwrite
# ---------------- FUNCTIONAL ----------------
FUNC_PARAM_EXTRACT "$datapath/$run_number"
: "${SequenceName:?FUNC_PARAM_EXTRACT did not set SequenceName}"
local func_dir="$Path_Analysed_Data/${run_number}${SequenceName}"
mkdir -p "$func_dir"
cd "$func_dir"
CHECK_FILE_EXISTENCE "$func_dir" || true
run_if_missing "G1_cp.nii.gz" -- BRUKER_to_NIFTI "$datapath" "$run_number" "$datapath/$run_number/method"
# 3dresample -orient LPI -inset G1_cp.nii.gz -prefix G1_cp.nii.gz -overwrite
# ===============================================================================================
# Step 1: Motion Correction (Using AFNI)
PRINT_YELLOW "Performing Step 1: Motion Correction"
# ===============================================================================================
: "${MiddleVolume:?FUNC_PARAM_EXTRACT (or motion helper) did not set MiddleVolume}"
run_if_missing "mc_func.nii.gz" "mc_func+orig.HEAD" "mc_func+orig.BRIK" -- MOTION_CORRECTION "$MiddleVolume" G1_cp.nii.gz mc_func
# ===============================================================================================
# Step 2: Cleaning the functional data (Masking)
PRINT_YELLOW "Performing Step 2: Cleaning the functional data by generating mask"
# ===============================================================================================
# Step 2a: Temporal smoothing of the functional data
smooth_movavg mc_func.nii.gz temporal_smoothed_mc_func.nii.gz 60 #temporal smoothing with window size of 60 seconds
# Step 2b: Always ask for indices; if a map exists, let the user choose reuse vs regenerate
# INTERACTIVE: fsleyes opens for visual inspection; the script blocks until closed.
fsleyes temporal_smoothed_mc_func.nii.gz
# Ask the user which volume they want (skipped if already set/exported).
prompt_if_unset base_start "Enter baseline start Volume index"
prompt_if_unset base_end "Enter baseline end Volume index"
prompt_if_unset sig_start "Enter Signal start Volume index"
prompt_if_unset sig_end "Enter Signal end Volume index"
3dTstat -mean -prefix "mean_baseline_image_${base_start}_to_${base_end}.nii.gz" "temporal_smoothed_mc_func.nii.gz[${base_start}..${base_end}]"
if [ -f mask_mean_mc_func.nii.gz ]; then
echo "Mask Image exists."
else
# Manual step: the user draws the brain mask in fsleyes and must save it
# under exactly this name for the rest of the pipeline to proceed.
PRINT_RED "Mask Image does not exist. Please create the mask and save it as mask_mean_mc_func.nii.gz"
fsleyes mean_baseline_image_${base_start}_to_${base_end}.nii.gz
fi
rm -f mean_baseline_image_${base_start}_to_${base_end}.nii.gz # removing a file that is not needed further
fslmaths temporal_smoothed_mc_func.nii.gz -mas mask_mean_mc_func.nii.gz cleaned_mc_func.nii.gz
# ===============================================================================================
# Step 3: tSNR Estimation (Using AFNI)
PRINT_YELLOW "Performing Step 3: Obtaining Mean func, Std func and tSNR Maps"
# ===============================================================================================
run_if_missing "tSNR_mc_func.nii.gz" "tSNR_mc_func+orig.HEAD" "tSNR_mc_func+orig.BRIK" -- TEMPORAL_SNR_using_FSL cleaned_mc_func.nii.gz
# ===============================================================================================
# Step 4: Generating Baseline Image and Creating Masks
PRINT_YELLOW "Performing Step 4: Generating Baseline Image and Creating Masks"
# ===============================================================================================
# Step 4a: Generating baseline image by applying smoothing on cleaned functional data
rm -f baseline_image_*.nii.gz signal_image_*.nii.gz Static_Map_*.nii.gz Static_Map_coreg.nii.gz
fslmaths cleaned_mc_func.nii.gz -s 0.20 smoothed_cleaned_mc_func.nii.gz #spatially smoothing functional data for better static map generation
PRINT_YELLOW "Performing Step 4a: Generating Baseline and Signal Image"
3dTstat -mean -prefix "signal_image_${sig_start}_to_${sig_end}.nii.gz" "smoothed_cleaned_mc_func.nii.gz[${sig_start}..${sig_end}]"
3dTstat -mean -prefix "baseline_image_${base_start}_to_${base_end}.nii.gz" "smoothed_cleaned_mc_func.nii.gz[${base_start}..${base_end}]"
# Step 4b: Generating Static Maps
# Percent signal change: 100 * (signal - baseline) / baseline, then masked.
fslmaths signal_image_${sig_start}_to_${sig_end}.nii.gz \
-sub baseline_image_${base_start}_to_${base_end}.nii.gz \
-div baseline_image_${base_start}_to_${base_end}.nii.gz \
-mul 100 \
tmp_signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz
fslmaths tmp_signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz -mas mask_mean_mc_func.nii.gz signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz
# Step 4c: Normalising entire time series to estimate Percent Signal Change
fslmaths smoothed_cleaned_mc_func.nii.gz \
-sub baseline_image_${base_start}_to_${base_end}.nii.gz \
-div baseline_image_${base_start}_to_${base_end}.nii.gz \
-mul 100 \
tmp_norm_cleaned_mc_func.nii.gz
fslmaths tmp_norm_cleaned_mc_func.nii.gz -mas mask_mean_mc_func.nii.gz norm_cleaned_mc_func.nii.gz
rm -f tmp_signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz tmp_norm_cleaned_mc_func.nii.gz
# ===============================================================================================
# Step 5: Coregistration (Using AFNI)
PRINT_YELLOW "Performing Step 5: Coregistration of functional/static map to structural"
# ===============================================================================================
# Step 5a: Cleaning the structural image by masking it with a manually created mask
if [[ -f $struct_coreg_dir/cleaned_anatomy.nii.gz ]]; then
echo "Cleaned Anatomy Image exists."
else
PRINT_RED "Cleaned Anatomy does not exist. Please create the mask and save it as mask_anatomy.nii.gz"
fsleyes "$struct_coreg_dir/anatomy.nii.gz"
fslmaths $struct_coreg_dir/anatomy.nii.gz -mas $struct_coreg_dir/mask_anatomy.nii.gz $struct_coreg_dir/cleaned_anatomy.nii.gz
fi
# Step 5b: Creating a mask image that includes cannulas as well
fslmaths mc_func.nii.gz -Tmean tmp_mean_mc_func.nii.gz #create temporary mean functional image
cp mask_mean_mc_func.nii.gz mask_mean_mc_func_cannulas.nii.gz #copy original mask to a new file to be edited that includes cannulas
PRINT_RED "Please edit the mask_mean_mc_func_cannulas.nii.gz to include cannulas and save it."
fsleyes mask_mean_mc_func_cannulas.nii.gz tmp_mean_mc_func.nii.gz #open in fsleyes for editing
fslmaths tmp_mean_mc_func.nii.gz -mas mask_mean_mc_func_cannulas.nii.gz cleaned_mean_mc_func_cannulas.nii.gz #create cleaned mean functional image including cannulas
rm -f tmp_mean_mc_func.nii.gz #remove temporary file
#NOTE: Step 5c coregisters the static map created on functional data to structural image where as
# Step 5d coregisters only the functional data to structural image and creates a coregistered functional time series and a signal change map from that coregistered functional time series.
# Step 5c: Coregistering the mean functional image to structural image and saving the affine matrix
if [[ -f mean_func_struct_aligned.aff12.1D ]]; then
PRINT_GREEN "Affine Matrix to coregister Signal Change Map exists."
else
PRINT_RED "Affine Matrix to coregister Signal Change Map doesn not exist. 3dAllineate will be used to coregister the mean functional image to structural image now."
3dAllineate \
-base $struct_coreg_dir/cleaned_anatomy.nii.gz \
-input cleaned_mean_mc_func_cannulas.nii.gz \
-1Dmatrix_save mean_func_struct_aligned.aff12.1D \
-cost lpa \
-prefix mean_func_struct_aligned.nii.gz \
-1Dparam_save params.1D \
-twopass
fi
if [[ -f Static_Map_coreg.nii.gz ]]; then
PRINT_GREEN "Coregistered Static Map exists."
else
PRINT_RED "Static Map to be coregistered does not exist. 3dAllineate will be used to coregister the Static Map to structural image now."
# Re-applies the affine saved in 5c (no new registration).
3dAllineate \
-base "$struct_coreg_dir/cleaned_anatomy.nii.gz" \
-input "signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz" \
-1Dmatrix_apply mean_func_struct_aligned.aff12.1D \
-master "$struct_coreg_dir/cleaned_anatomy.nii.gz" \
-final linear \
-prefix signal_change_map_coregistered_structural_space.nii.gz
fi
#Step 5d: Coregistering the motion corrected functional data to structural image using the saved affine matrix
if [[ -f sm_fMRI_coregistered_to_struct.nii.gz ]]; then
PRINT_GREEN "Coregistered functional data exists."
else
PRINT_RED "Coregistered functional data does not exist. 3dAllineate will be used to coregister the functional data to structural image now."
# Helper from scm_from_coregsitered_functional_v1.sh (sic); presumably
# produces sm_fMRI_coregistered_to_struct.nii.gz and the mask/Static_Map
# files consumed below — TODO confirm output names.
scm_coregsitered_functional smoothed_cleaned_mc_func.nii.gz "$struct_coreg_dir/cleaned_anatomy.nii.gz" "$base_start" "$base_end" "$sig_start" "$sig_end"
fi
fslmaths signal_change_map_coregistered_structural_space.nii.gz -mas mask_mean_fMRI_coregistered_to_struct.nii.gz cleaned_signal_change_map_coregistered_structural_space.nii.gz
PRINT_GREEN "cleaned_signal_change_map_coregistered_structural_space.nii.gz is the Signal Change Map generated from functional data and coregistered to structural image."
# Relies on nullglob (set in the prelude) if no Static_Map file exists.
fslmaths sm_coreg_func_Static_Map*.nii.gz -mas mask_mean_fMRI_coregistered_to_struct.nii.gz cleaned_sm_cleaned_coreg_func_Static_Map.nii.gz
PRINT_GREEN "cleaned_sm_cleaned_coreg_func_Static_Map is the Signal Change Map generated from coregistered functional data to structural image."
# ===============================================================================================
# Step 6: Performing ROI Analysis
PRINT_YELLOW "Performing Step 6: Performing ROI Analysis"
# ===============================================================================================
#Marking and saving Pattern of ROIs+
PRINT_RED "Please create ROIs on the functional time series and save them in the following particular format:"
echo ""
PRINT_YELLOW "roi_{what protein/aav is there}_{is it direct injection or aav}_{analyte injeted}_{hemisphere side}.nii.gz"
PRINT_YELLOW "For Example: if GCaMP6f is directly injected in the left hemisphere and dopamine is injected in the right hemisphere following a viral injection, then the following ROIs should be created:"
echo ""
PRINT_RED "roi_GCaMP6f_direct_left.nii.gz or roi_dopamine_aav_right.nii.gz"
echo ""
#Convert smoothed coregistered functional data to percent signal change
# 3dTstat -mean -prefix "tmp_sm_coreg_func_baseline_image_${base_start}_to_${base_end}.nii.gz" "sm_fMRI_coregistered_to_struct.nii.gz[${base_start}..${base_end}]"
# NOTE(review): mean_cleaned_mc_func.nii.gz is not created in this function —
# presumably an output of TEMPORAL_SNR_using_FSL; confirm.
fsleyes mean_cleaned_mc_func.nii.gz signal_change_map_${base_start}_to_${base_end}_and_${sig_start}_to_${sig_end}.nii.gz norm_cleaned_mc_func.nii.gz smoothed_cleaned_mc_func.nii.gz
# Extract the mean percent-signal-change time course of every user-drawn ROI.
for roi in roi_*.nii.gz; do
fslmeants -i norm_cleaned_mc_func.nii.gz -m "$roi" -o "psc_${roi%.nii.gz}.txt"
done
echo "✔ Completed pipeline for CSV line $line_no."
# Helper from moving_results.sh; presumably archives/relocates outputs.
move_results
)
}
# ---- iterate over all selected lines (each in its own subshell) ----
for ln in "${LINES[@]}"; do
  process_csv_line "$ln"
done
echo
echo "All requested CSV lines processed."
# ===============================================================================================
# Step 7: Performing Group Data Analysis for Time Course Extraction
PRINT_YELLOW "Performing Step 7: Performing Group Data Analysis for Time Course Extraction"
# ===============================================================================================
# BUG FIX: $project_name / $sub_project_name were only ever set INSIDE the
# process_csv_line subshell, so under `set -u` the original final command
# always aborted with an unbound-variable error. Prompt for them here instead
# (prompt_if_unset keeps any value already exported by the environment), and
# quote the path so spaces survive.
prompt_if_unset project_name "Project name for group analysis"
prompt_if_unset sub_project_name "Sub-project name for group analysis"
python3 Group_data_Analysis.py "$root_location/AnalysedData/$project_name/$sub_project_name" --tr 1.0 --movmean 120 --verbose