@@ -78,15 +78,15 @@ def linear_regression(
     # from projection require no extra terms in variance
     # estimate for loop covariates (columns of G), which is
     # only true when an intercept is present.
-    XLPS = (XLP ** 2).sum(axis=0, keepdims=True).T
+    XLPS = (XLP ** 2).sum(axis=0, keepdims=True).T
     assert XLPS.shape == (n_loop_covar, 1)
     B = (XLP.T @ YP) / XLPS
     assert B.shape == (n_loop_covar, n_outcome)

     # Compute residuals for each loop covariate and outcome separately
     YR = YP[:, np.newaxis, :] - XLP[..., np.newaxis] * B[np.newaxis, ...]
     assert YR.shape == (n_obs, n_loop_covar, n_outcome)
-    RSS = (YR ** 2).sum(axis=0)
+    RSS = (YR ** 2).sum(axis=0)
     assert RSS.shape == (n_loop_covar, n_outcome)
     # Get t-statistics for coefficient estimates
     T = B / np.sqrt(RSS / dof / XLPS)
@@ -382,7 +382,7 @@ def regenie_loco_regression(
     Y -= Y.mean(axis=0)
     # Orthogonally project covariates out of phenotype matrix
     Y -= Q @ (Q.T @ Y)
-    Y_scale = da.sqrt(da.sum(Y ** 2, axis=0) / (Y.shape[0] - Q.shape[1]))
+    Y_scale = da.sqrt(da.sum(Y ** 2, axis=0) / (Y.shape[0] - Q.shape[1]))
     # Scale
     Y /= Y_scale[None, :]
 
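The second hunk is the phenotype preprocessing for the LOCO regression: each phenotype column is centered, the covariate effects are removed by projecting onto an orthonormal basis Q of the covariate matrix, and the result is scaled by its residual standard deviation using n - rank(Q) degrees of freedom. A small Dask sketch under those assumptions follows; the covariate matrix C, its QR factorization, and the shapes and chunk sizes are illustrative, not taken from the diff.

    import dask.array as da

    n_obs, n_covar, n_outcome = 1000, 4, 3
    C = da.random.normal(size=(n_obs, n_covar), chunks=(250, n_covar))       # covariates
    Y = da.random.normal(size=(n_obs, n_outcome), chunks=(250, n_outcome))   # phenotypes

    Q, _ = da.linalg.qr(C)        # orthonormal basis of the covariates (tall-skinny QR)
    Y = Y - Y.mean(axis=0)        # center each phenotype
    Y = Y - Q @ (Q.T @ Y)         # project covariate effects out of the phenotypes
    # Scale by the residual standard deviation (dof = n_obs - n_covar)
    Y_scale = da.sqrt(da.sum(Y ** 2, axis=0) / (Y.shape[0] - Q.shape[1]))
    Y = Y / Y_scale[None, :]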