
Commit 71d01a3
code cleaning
1 parent: bea8225

13 files changed: +111 / -111 lines changed. Every hunk shown below is whitespace-only: each removed/added line pair differs only in stripped trailing whitespace. Four of the 13 changed files appear below.

DATA/IRIS_EMC/README.md

Lines changed: 2 additions & 2 deletions
@@ -49,7 +49,7 @@ Then, select in the `DATA/Par_file`:
 ```
 MODEL = EMC_model
 ```
-to read in and use the EMC model. 
+to read in and use the EMC model.
 
 Additionally, for local EMC models the mesher `xmeshfem3D` can use parameters to produce local meshes, with a cut-off depth and local doubling layers.
 In `Par_file`, the following settings could be defined, for example in the Alaska model case:

@@ -95,7 +95,7 @@ which can be used as a guideline to select a stable `DT` size.
 
 ## Implementation
 
-Currently, we support the above mentioned Alaska and CVM model files only. 
+Currently, we support the above mentioned Alaska and CVM model files only.
 Other models might define different parameters and are likely not compatible at the moment.
 
 More general support will hopefully come in future.
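
The local-mesh settings this README references sit outside the hunk context shown above. As a rough sketch only, a regional EMC run's `DATA/Par_file` might contain entries like the following; apart from `MODEL = EMC_model` (shown in the diff), the parameter names here are assumptions for illustration, not taken from this commit:

```
MODEL                           = EMC_model   # as selected above
# hypothetical local-mesh controls (cut-off depth and doubling layers):
REGIONAL_MESH_CUTOFF            = .true.      # assumed flag: truncate mesh at depth
REGIONAL_MESH_CUTOFF_DEPTH      = 400.d0      # assumed cut-off depth (km)
REGIONAL_MESH_ADD_2ND_DOUBLING  = .false.     # assumed doubling-layer toggle
```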

src/shared/broadcast_computed_parameters.f90

Lines changed: 1 addition & 1 deletion
@@ -367,7 +367,7 @@ subroutine broadcast_computed_parameters()
   EMC_MODEL_TISO = bcast_logical(74)
   EMC_MODEL_QMU = bcast_logical(75)
   FULL_GRAVITY = bcast_logical(76)
-  USE_SINSQ_STF = bcast_logical(77) 
+  USE_SINSQ_STF = bcast_logical(77)
   HDF5_ENABLED = bcast_logical(78)
   HDF5_FOR_MOVIES = bcast_logical(79)
   OUTPUT_SEISMOS_HDF5 = bcast_logical(80)

src/specfem3D/SIEM_prepare_solver.F90

Lines changed: 7 additions & 7 deletions
@@ -1636,13 +1636,13 @@ subroutine SIEM_prepare_seismos()
 
   ! additional scaling factors for gravity seismograms
   scale_accel = scale_veloc * scale_t_inv ! [m / s^2]
-  scale_phi = scale_displ**2 * scale_t_inv**2 ! [m^2 / s^2] 
-  scale_pgrav = scale_displ * scale_t_inv**2 ! [m / s^2] 
-
-  ! strain is defined as second time integral 
-  ! of grad grad phi here we just output 
-  ! grad grad phi so scale by units of 1/s^2 
-  scale_hgrav = scale_t_inv**2 ! [1 / s^2] 
+  scale_phi = scale_displ**2 * scale_t_inv**2 ! [m^2 / s^2]
+  scale_pgrav = scale_displ * scale_t_inv**2 ! [m / s^2]
+
+  ! strain is defined as second time integral
+  ! of grad grad phi here we just output
+  ! grad grad phi so scale by units of 1/s^2
+  scale_hgrav = scale_t_inv**2 ! [1 / s^2]
 
 
   ! allocate seismogram array
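
As a unit check on the factors above (an added note, not part of the commit): with `scale_displ` in m and `scale_t_inv` in 1/s,

$$[\phi] = \mathrm{m^2\,s^{-2}}, \qquad [\nabla\nabla\phi] = \mathrm{m^2\,s^{-2}} \cdot \mathrm{m^{-2}} = \mathrm{s^{-2}},$$

so outputting `grad grad phi` directly, rather than its second time integral (the strain), indeed requires only `scale_hgrav = scale_t_inv**2`.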

src/specfem3D/SIEM_solver_petsc.F90

Lines changed: 54 additions & 54 deletions
@@ -80,23 +80,23 @@
 ! call VecSetValues(bvec,neq,l2gdof(1:),y,ADD_VALUES,ierr)
 ! well, until a better solution is found.
 
-! 5/12/25 - WE: A further issue with this is the variables ix and y 
-! for single values e.g. originally 
+! 5/12/25 - WE: A further issue with this is the variables ix and y
+! for single values e.g. originally
 ! call VecSetValues(interface_gvec1,1,igdof,rval,INSERT_VALUES,ierr)
-! needs to be written as 
+! needs to be written as
 ! call VecSetValues(interface_gvec1,1,[igdof],[rval],INSERT_VALUES,ierr)
-! also for MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], 
-!                       PetscInt n, const PetscInt idxn[], 
+! also for MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[],
+!                       PetscInt n, const PetscInt idxn[],
 !                       const PetscScalar v[], InsertMode addv)
 ! the variables idxm, idxn, and v.
-! Other examples in: 
-! MatMPIAIJSetPreallocation --> requires PETSC_NULL_INTEGER_ARRAY 
+! Other examples in:
+! MatMPIAIJSetPreallocation --> requires PETSC_NULL_INTEGER_ARRAY
 !                               instead of PETSC_NULL_INTEGER
 
 ! For MatSetValues - flattening
 ! MatSetValues documentation suggests in f90 varray needs to be flattened
-! note that F90 is column major but C is row major. 
-! It appears petsc updated VecGetArrayF90 back to VecGetArray in 
+! note that F90 is column major but C is row major.
+! It appears petsc updated VecGetArrayF90 back to VecGetArray in
 ! 3.23 (https://petsc.org/release/changes/323/)
 
 module siem_solver_petsc
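
A minimal Fortran sketch of the calling-convention change described in the comment block above, assuming a recent PETSc whose stricter generic interfaces reject scalar actual arguments (declarations abbreviated; names as in the comment):

```fortran
! sketch only: scalar index/value arguments must be passed as rank-1 arrays
type(tVec)     :: interface_gvec1
PetscInt       :: igdof
PetscScalar    :: rval
PetscErrorCode :: ierr

! older spelling, no longer accepted by the stricter interface:
!   call VecSetValues(interface_gvec1, 1, igdof, rval, INSERT_VALUES, ierr)

! newer spelling: wrap the scalars in [...] array constructors
call VecSetValues(interface_gvec1, 1, [igdof], [rval], INSERT_VALUES, ierr)
```

The same wrapping applies to the `idxm`, `idxn`, and `v` arguments of `MatSetValues`.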
@@ -228,7 +228,7 @@ module siem_solver_petsc
 #define API_F90_SUFFIX 0
 #endif
 
-! In version 3.23 
+! In version 3.23
 ! MatInfo changed from arrays to derived data types
 #if PETSC_VERSION_GE(3,22,0)
 #define PETSC_INFO_DERIVED_TYPE 1

@@ -282,7 +282,7 @@ module siem_solver_petsc
   type(tPC) :: pc
   PetscErrorCode :: ierr
   PetscInt :: nzeros_max,nzeros_min
-  PetscInt :: ig0,ig1, ONE 
+  PetscInt :: ig0,ig1, ONE
 #endif
 
   ! public function

@@ -312,7 +312,7 @@ subroutine petsc_initialize1()
   use specfem_par, only: ADD_TRINF,SIMULATION_TYPE
 
   use specfem_par_full_gravity, only: ggdof_ic1,ggdof_oc1,ggdof_cm1,ggdof_trinf1,ggdof_inf1
- 
+
   implicit none
   type(tVec) :: nzeror_gvec1,nzeror_dvec1,nzeror_ovec1,iproc_gvec1, &
                 interface_gvec1,ninterface_dvec1,ninterface_ovec1,nself_gvec1

@@ -618,9 +618,9 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecGetArrayF90(nself_gvec1,rnself_array1,ierr)
-#else 
+#else
   call VecGetArray(nself_gvec1,rnself_array1,ierr)
-#endif 
+#endif
 
   allocate(nself_array1(n))
   nself_array1 = int(rnself_array1(1:n))
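
The `#if API_F90_SUFFIX` blocks above recur throughout this file to select between the `...F90`-suffixed accessors and the plain names that newer PETSc restored (see the changes/323 link in the header comments). A sketch of how such a guard could be defined; the exact version cutoff is an assumption here, since the hunks only show the macro being used:

```fortran
! sketch: pick the Fortran array-accessor spelling once per build
! (the 3.23 cutoff is an assumption for illustration)
#if PETSC_VERSION_GE(3,23,0)
! newer releases: plain VecGetArray / VecRestoreArray
#define API_F90_SUFFIX 0
#else
! older releases: VecGetArrayF90 / VecRestoreArrayF90
#define API_F90_SUFFIX 1
#endif
```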
@@ -629,9 +629,9 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(nself_gvec1,rnself_array1,ierr)
-#else 
+#else
   call VecRestoreArray(nself_gvec1,rnself_array1,ierr)
-#endif 
+#endif
 
   call VecDestroy(nself_gvec1,ierr)
 

@@ -749,9 +749,9 @@ subroutine petsc_initialize1()
   call VecGetLocalSize(nzeror_dvec1,n,ierr)
 #if API_F90_SUFFIX
   call VecGetArrayF90(nzeror_dvec1,nzeror_darray1,ierr)
-#else 
+#else
   call VecGetArray(nzeror_dvec1,nzeror_darray1,ierr)
-#endif 
+#endif
 
 
   allocate(nnzero_diag1(n))

@@ -765,28 +765,28 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(nzeror_dvec1,nzeror_darray1,ierr)
-#else 
+#else
   call VecRestoreArray(nzeror_dvec1,nzeror_darray1,ierr)
-#endif 
+#endif
 
   call VecDestroy(nzeror_dvec1,ierr)
 
   ! off-diagonal matrix
 #if API_F90_SUFFIX
   call VecGetArrayF90(nzeror_ovec1,nzeror_oarray1,ierr)
-#else 
+#else
   call VecGetArray(nzeror_ovec1,nzeror_oarray1,ierr)
-#endif 
+#endif
 
 
   allocate(nnzero_offdiag1(n))
   nnzero_offdiag1(:) = int(nzeror_oarray1(1:n))
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(nzeror_ovec1,nzeror_oarray1,ierr)
-#else 
+#else
   call VecRestoreArray(nzeror_ovec1,nzeror_oarray1,ierr)
-#endif 
+#endif
 
   call VecDestroy(nzeror_ovec1,ierr)
 

@@ -795,9 +795,9 @@ subroutine petsc_initialize1()
   ! 8 therefore to be safe we need to subtract this from all
 #if API_F90_SUFFIX
   call VecGetArrayF90(ninterface_dvec1,rninterface_darray1,ierr)
-#else 
+#else
   call VecGetArray(ninterface_dvec1,rninterface_darray1,ierr)
-#endif 
+#endif
 
 
   !where(rninterface_darray1>0.0 .and. rninterface_darray1 < 1.0)rninterface_darray1=1.0

@@ -807,9 +807,9 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(ninterface_dvec1,rninterface_darray1,ierr)
-#else 
+#else
   call VecRestoreArray(ninterface_dvec1,rninterface_darray1,ierr)
-#endif 
+#endif
 
   call VecDestroy(ninterface_dvec1,ierr)
 

@@ -818,9 +818,9 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecGetArrayF90(ninterface_ovec1,rninterface_oarray1,ierr)
-#else 
+#else
   call VecGetArray(ninterface_ovec1,rninterface_oarray1,ierr)
-#endif 
+#endif
 
 
   !where(rninterface_oarray1>0.0 .and. rninterface_oarray1 < 1.0)rninterface_oarray1=1.0

@@ -830,9 +830,9 @@ subroutine petsc_initialize1()
 
 #if API_F90_SUFFIX
   call VecGetArrayF90(ninterface_ovec1,rninterface_oarray1,ierr)
-#else 
+#else
   call VecGetArray(ninterface_ovec1,rninterface_oarray1,ierr)
-#endif 
+#endif
 
   call VecDestroy(ninterface_ovec1,ierr)
 

@@ -867,18 +867,18 @@ subroutine petsc_initialize1()
   ! non-zero array for diagonal/off-diagonal matrix?
 #if API_F90_SUFFIX
   call VecGetArrayF90(nzeror_gvec1,nzeror_array1,ierr); CHECK_PETSC_ERROR(ierr)
-#else 
+#else
   call VecGetArray(nzeror_gvec1,nzeror_array1,ierr); CHECK_PETSC_ERROR(ierr)
-#endif 
+#endif
 
   allocate(inzeror_array1(n))
   inzeror_array1(:) = int(nzeror_array1(1:n))
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(nzeror_gvec1,nzeror_array1,ierr); CHECK_PETSC_ERROR(ierr)
-#else 
+#else
   call VecRestoreArray(nzeror_gvec1,nzeror_array1,ierr); CHECK_PETSC_ERROR(ierr)
-#endif 
+#endif
 
   call VecDestroy(nzeror_gvec1,ierr); CHECK_PETSC_ERROR(ierr)
 

@@ -914,8 +914,8 @@ subroutine petsc_initialize1()
 !
 ! this seems to lead to a much faster petsc_set_matrix1() routine without the re-allocations.
 ! however, the diagonal and in particular the off-diagonal estimate with nzeros_max might be still off.
- 
- 
+
+
 #if PETSC_ARRAY_NULL_API
   ! Later version uses integer array
   call MatMPIAIJSetPreallocation(Amat1,nzeros_max, PETSC_NULL_INTEGER_ARRAY, &
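
The call above is cut off at the line continuation (`&`); a minimal sketch of the two preallocation spellings the `PETSC_ARRAY_NULL_API` guard selects between, with the continued arguments reconstructed as an assumption from the comment at the top of this file:

```fortran
#if PETSC_ARRAY_NULL_API
  ! newer PETSc: null *array* placeholders for the per-row nonzero counts
  call MatMPIAIJSetPreallocation(Amat1, nzeros_max, PETSC_NULL_INTEGER_ARRAY, &
                                 nzeros_max, PETSC_NULL_INTEGER_ARRAY, ierr)
#else
  ! older PETSc: scalar PETSC_NULL_INTEGER placeholders
  call MatMPIAIJSetPreallocation(Amat1, nzeros_max, PETSC_NULL_INTEGER, &
                                 nzeros_max, PETSC_NULL_INTEGER, ierr)
#endif
```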
@@ -1195,7 +1195,7 @@ subroutine petsc_set_matrix1()
   !debugging
   logical, parameter :: DEBUG_FILE_OUTPUT = .false.
 
-  ! For the MatGetRow call 
+  ! For the MatGetRow call
   !integer :: ncols
   !integer,dimension(:),allocatable :: cols
   PetscInt :: ncols

@@ -1213,9 +1213,9 @@ subroutine petsc_set_matrix1()
   ! matrix info
 #if PETSC_INFO_DERIVED_TYPE
   MatInfo :: info
-#else 
+#else
   double precision :: info(MAT_INFO_SIZE)
-#endif 
+#endif
   double precision :: mallocsval
 
 

@@ -1459,9 +1459,9 @@ subroutine petsc_set_matrix1()
 
 #if PETSC_INFO_DERIVED_TYPE
   mallocsval = info%mallocs
-#else 
+#else
   mallocsval = info(MAT_INFO_MALLOCS) ! number of mallocs during MatSetValues()
-#endif 
+#endif
   !memval = info(MAT_INFO_MEMORY) ! memory allocated - not provided
   !nonzeros_allocated = info(MAT_INFO_NZ_ALLOCATED) ! nonzero entries allocated
 
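A minimal sketch of how the two `MatInfo` representations guarded above are filled and read, assuming a matrix `Amat1` as elsewhere in this file; the `MatGetInfo` call itself is not shown in these hunks:

```fortran
#if PETSC_INFO_DERIVED_TYPE
  MatInfo :: info                           ! derived type (newer PETSc)
#else
  double precision :: info(MAT_INFO_SIZE)   ! flat array (older PETSc)
#endif
  double precision :: mallocsval

  ! query local matrix statistics, then read the mallocs counter
  call MatGetInfo(Amat1, MAT_LOCAL, info, ierr)
#if PETSC_INFO_DERIVED_TYPE
  mallocsval = info%mallocs
#else
  mallocsval = info(MAT_INFO_MALLOCS)
#endif
```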

@@ -1751,18 +1751,18 @@ subroutine scatter_globalvec1(global_vec,larray)
 
 #if API_F90_SUFFIX
   call VecGetArrayF90(local_vec1,array_data,ierr); CHECK_PETSC_ERROR(ierr)
-#else 
+#else
   call VecGetArray(local_vec1,array_data,ierr); CHECK_PETSC_ERROR(ierr)
-#endif 
+#endif
 
 
   larray(1:n) = array_data(1:n)
 
 #if API_F90_SUFFIX
   call VecRestoreArrayF90(local_vec1,array_data,ierr); CHECK_PETSC_ERROR(ierr)
-#else 
+#else
   call VecRestoreArray(local_vec1,array_data,ierr); CHECK_PETSC_ERROR(ierr)
-#endif 
+#endif
 
 end subroutine scatter_globalvec1
 

@@ -2140,9 +2140,9 @@ subroutine petsc_set_matrix()
   ! matrix info
 #if PETSC_INFO_DERIVED_TYPE
   MatInfo :: info
-#else 
+#else
   double precision :: info(MAT_INFO_SIZE)
-#endif 
+#endif
   double precision :: mallocsval
 
   PetscLogDouble :: bytes

@@ -2252,7 +2252,7 @@ subroutine petsc_set_matrix()
   do iflat = 1, ncount
     do jflat = 1, ncount
       varr((iflat-1)*ncount + jflat) = storekmat_crust_mantle(idof(iflat), idof(jflat), i_elmt)
-    enddo !jflat 
+    enddo !jflat
   enddo !iflat
 
   call MatSetValues(Amat,ncount,igdof(1:ncount),ncount,igdof(1:ncount),varr,ADD_VALUES,ierr); CHECK_PETSC_ERROR(ierr)
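
In the loop above, `jflat` varies fastest, so `varr` holds the element block in C row-major order even though Fortran arrays are stored column-major; this matches the flattening advice in the header comments of this file. Restated standalone (with an illustrative `kmat` in place of the `storekmat_*` arrays):

```fortran
! sketch: varr((i-1)*n + j) holds entry (i,j), so j varies fastest (row-major)
do iflat = 1, ncount
  do jflat = 1, ncount
    varr((iflat-1)*ncount + jflat) = kmat(iflat, jflat)
  enddo
enddo
```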
@@ -2295,7 +2295,7 @@ subroutine petsc_set_matrix()
   do iflat = 1, ncount
     do jflat = 1, ncount
       varr((iflat-1)*ncount + jflat) = storekmat_trinfinite(idof(iflat), idof(jflat), i_elmt)
-    enddo !jflat 
+    enddo !jflat
   enddo !iflat
   call MatSetValues(Amat,ncount,igdof(1:ncount),ncount,igdof(1:ncount),varr,ADD_VALUES,ierr); CHECK_PETSC_ERROR(ierr)
   deallocate(varr)

@@ -2334,7 +2334,7 @@ subroutine petsc_set_matrix()
   do iflat = 1, ncount
     do jflat = 1, ncount
       varr((iflat-1)*ncount + jflat) = storekmat_infinite(idof(iflat), idof(jflat), i_elmt)
-    enddo !jflat 
+    enddo !jflat
   enddo !iflat
   call MatSetValues(Amat,ncount,igdof(1:ncount),ncount,igdof(1:ncount),varr,ADD_VALUES,ierr); CHECK_PETSC_ERROR(ierr)
   deallocate(varr)

@@ -2357,9 +2357,9 @@ subroutine petsc_set_matrix()
 
 #if PETSC_INFO_DERIVED_TYPE
   mallocsval = info%mallocs
-#else 
+#else
   mallocsval = info(MAT_INFO_MALLOCS) ! number of mallocs during MatSetValues()
-#endif 
+#endif
 
   ! memory usage
   call PetscMemoryGetCurrentUsage(bytes, ierr); CHECK_PETSC_ERROR(ierr)
