!> This is the core module for MOM drifter diagnostics
module MOM_particles_framework
! This file is part of MOM6. See LICENSE.md for the license.
use constants_mod, only: radius, pi, omega, HLF
use MOM_grid, only : ocean_grid_type
use MOM_time_manager, only : get_time
use MOM_file_parser, only : get_param
use mpp_mod, only: mpp_npes, mpp_pe, mpp_root_pe, mpp_sum, mpp_min, mpp_max, NULL_PE
use mpp_mod, only: mpp_send, mpp_recv, mpp_sync_self, mpp_pe, mpp_root_pe, mpp_chksum
use mpp_mod, only: COMM_TAG_1, COMM_TAG_2, COMM_TAG_3, COMM_TAG_4
use mpp_mod, only: COMM_TAG_5, COMM_TAG_6, COMM_TAG_7, COMM_TAG_8
use mpp_mod, only: COMM_TAG_9, COMM_TAG_10
use mpp_mod, only: mpp_clock_begin, mpp_clock_end, mpp_clock_id, input_nml_file
use mpp_mod, only: CLOCK_COMPONENT, CLOCK_SUBCOMPONENT, CLOCK_LOOP
use mpp_domains_mod, only: domain2D
use mpp_parameter_mod, only: SCALAR_PAIR, CGRID_NE, BGRID_NE, CORNER, AGRID,CENTER
use mpp_domains_mod, only: mpp_update_domains, mpp_define_domains
use mpp_domains_mod, only: mpp_get_compute_domain, mpp_get_data_domain, mpp_get_global_domain
use mpp_domains_mod, only: CYCLIC_GLOBAL_DOMAIN, FOLD_NORTH_EDGE
use mpp_domains_mod, only: mpp_get_neighbor_pe, NORTH, SOUTH, EAST, WEST
use mpp_domains_mod, only: mpp_define_io_domain
use fms_mod, only: stdlog, stderr, error_mesg, FATAL, WARNING
use fms_mod, only: open_namelist_file, check_nml_error, close_file
use fms_mod, only: clock_flag_default
use time_manager_mod, only: time_type, get_date, get_time, set_date, operator(-)
use diag_manager_mod, only: register_diag_field, register_static_field, send_data
use diag_manager_mod, only: diag_axis_init
implicit none ; private
integer :: buffer_width=19 !< Size of the buffer dimension used for inter-PE particle communication
integer :: buffer_width_traj=16 !< Size of the buffer dimension used for inter-PE trajectory communication
logical :: folded_north_on_pe = .false. !< If true, indicates the presence of the tri-polar grid
logical :: verbose=.false. !< Be verbose to stderr
logical :: debug=.false. !< Turn on debugging
logical :: really_debug=.false. !< Turn on more verbose debugging (also forces debug=.true.)
logical :: parallel_reprod=.true. !< Reproduce across different PE decompositions
logical :: use_slow_find=.true. !< Use really slow (but robust) find_cell for reading restarts
logical :: ignore_ij_restart=.false. !< If true, ignore the i,j locations in the restart file and re-locate particles from lon/lat (needed to use restarts on different grids)
logical :: use_roundoff_fix=.true. !< Use a "fix" for the round-off discrepancy between is_point_in_cell() and pos_within_cell()
logical :: old_bug_bilin=.true. !< If true, uses the inverted bilinear function (use False to get correct answer)
character(len=10) :: restart_input_dir = 'INPUT/' !< Directory to look for restart files
integer, parameter :: delta_buf=25 !< Size by which to increment buffers
real, parameter :: pi_180=pi/180. !< Converts degrees to radians
logical :: fix_restart_dates=.true. !< After a restart, check that parts were created before the current model date
logical :: do_unit_tests=.false. !< Conduct some unit tests
logical :: force_all_pes_traj=.false. !< Force all pes write trajectory files regardless of io_layout
! Public parameters. TODO(Niki): write a subroutine to expose these.
public buffer_width,buffer_width_traj
public verbose, really_debug, debug, restart_input_dir,old_bug_bilin,use_roundoff_fix
public ignore_ij_restart, use_slow_find
public force_all_pes_traj
! Public types
public particles_gridded, xyt, particle, particles, buffer!, bond
! Public subroutines and functions
public particles_framework_init
public send_parts_to_other_pes
public update_halo_particles
public pack_traj_into_buffer2, unpack_traj_from_buffer2
public increase_ibuffer
public add_new_part_to_list, count_out_of_order, check_for_duplicates
public insert_part_into_list, create_particle, delete_particle_from_list, destroy_particle
public print_fld,print_part, print_parts,record_posn, push_posn, append_posn, check_position
public move_trajectory, move_all_trajectories
public find_cell, find_cell_by_search, count_parts, is_point_in_cell, pos_within_cell
public find_layer, find_depth
public bilin, yearday, parts_chksum, list_chksum, count_parts_in_list
public linlinx, linliny
public checksum_gridded
public grd_chksum2,grd_chksum3
public fix_restart_dates, offset_part_dates
public move_part_between_cells
public find_individual_particle
public monitor_a_part
public is_point_within_xi_yj_bounds
public test_check_for_duplicate_ids_in_list
public check_for_duplicates_in_parallel
public split_id, id_from_2_ints, generate_id
!> Container for gridded fields
type :: particles_gridded
type(domain2D), pointer :: domain !< MPP parallel domain
integer :: halo !< Nominal halo width
integer :: isc !< Start i-index of computational domain
integer :: iec !< End i-index of computational domain
integer :: jsc !< Start j-index of computational domain
integer :: jec !< End j-index of computational domain
integer :: isd !< Start i-index of data domain
integer :: ied !< End i-index of data domain
integer :: jsd !< Start j-index of data domain
integer :: jed !< End j-index of data domain
integer :: isg !< Start i-index of global domain
integer :: ieg !< End i-index of global domain
integer :: jsg !< Start j-index of global domain
integer :: jeg !< End j-index of global domain
integer :: ke !< The number of layers in the vertical.
integer :: is_offset=0 !< add to i to recover global i-index
integer :: js_offset=0 !< add to j to recover global j-index
integer :: my_pe !< MPI PE index
integer :: pe_N !< MPI PE index of PE to the north
integer :: pe_S !< MPI PE index of PE to the south
integer :: pe_E !< MPI PE index of PE to the east
integer :: pe_W !< MPI PE index of PE to the west
logical :: grid_is_latlon !< Flag to say whether the coordinate is in lat-lon degrees, or meters
logical :: grid_is_regular !< Flag to say whether point in cell can be found assuming regular Cartesian grid
real :: Lx !< Length of the domain in x direction
real, dimension(:,:), allocatable :: lon !< Longitude of cell corners (degree E)
real, dimension(:,:), allocatable :: lat !< Latitude of cell corners (degree N)
real, dimension(:,:), allocatable :: lonc !< Longitude of cell centers (degree E)
real, dimension(:,:), allocatable :: latc !< Latitude of cell centers (degree N)
real, dimension(:,:), allocatable :: dx !< Length of cell edge (m)
real, dimension(:,:), allocatable :: dy !< Length of cell edge (m)
real, dimension(:,:), allocatable :: area !< Area of cell (m^2)
real, dimension(:,:), allocatable :: msk !< Ocean-land mask (1=ocean)
real, dimension(:,:), allocatable :: cos !< Cosine from rotation matrix to lat-lon coords
real, dimension(:,:), allocatable :: sin !< Sine from rotation matrix to lat-lon coords
real, dimension(:,:), allocatable :: ocean_depth !< Depth of ocean (m)
real, dimension(:,:,:), allocatable :: uo !< Ocean zonal flow (m/s)
real, dimension(:,:,:), allocatable :: vo !< Ocean meridional flow (m/s)
real, dimension(:,:,:), allocatable :: hdepth !< Cumulative thickness from the ocean surface
real, dimension(:,:), allocatable :: tmp !< Temporary work space
real, dimension(:,:), allocatable :: tmpc !< Temporary work space
real, dimension(:,:), allocatable :: parity_x !< X component of vector pointing from i,j to i+1,j+1 (for detecting tri-polar fold)
real, dimension(:,:), allocatable :: parity_y !< Y component of vector pointing from i,j to i+1,j+1 (for detecting tri-polar fold)
integer, dimension(:,:), allocatable :: particle_counter_grd !< Counts particles created for naming purposes
!>@{
!! Diagnostic handle
integer :: id_uo=-1, id_vo=-1, id_unused=-1
integer :: id_count=-1, id_chksum=-1
!>@}
end type particles_gridded
!> xyt is a data structure containing particle position and velocity fields
!! at a single sampled point along a trajectory (stored as a linked list).
type :: xyt
real :: lon, lat, day !< Current position (degrees) and day
real :: lat_old, lon_old !< Previous position (degrees)
real :: uvel, vvel !< Current velocity components (m/s)
real :: uvel_old, vvel_old !< Previous velocity components (m/s)
real :: theta !< Temperature at the particle position (see particle%theta)
integer :: year, particle_num !< Current year and particle number
integer(kind=8) :: id = -1 !< Particle Identifier
real :: k !< Current vertical level in which the particle resides
real :: depth !< Current depth of the particle
logical :: k_space !< Logical indicating whether particle position is held in k (vs z) space
type(xyt), pointer :: next=>null() !< Pointer to the next position in the list
end type xyt
!> particle types are data structures describing a tracked particle
type :: particle
type(particle), pointer :: prev=>null(), next=>null() !< Previous and next particles in the cell's linked list
! State variables (specific to the particle, needed for restarts)
real :: lon, lat, depth, uvel, vvel !< position (degrees) and zonal and meridional velocities (m/s)
real :: theta !< temperature at the location of the particle
real :: lon_old, lat_old, uvel_old, vvel_old !< previous position (degrees) and zonal
!< and meridional velocities (m/s)
real :: start_lon, start_lat, start_d, start_day !< origination position (degrees), depth, and day
integer :: start_year !< origination year
real :: halo_part !< equal to zero for particles on the computational domain, and 1 for particles on the halo
integer(kind=8) :: id,drifter_num !< particle identifier
integer :: ine, jne !< nearest index in NE direction (for convenience)
real :: k !< vertical level of particle
logical :: k_space !< flag for whether depth is stored in kspace (vs z)
real :: xi, yj !< non-dimensional coords within current cell (0..1)
real :: uo, vo !< zonal and meridional ocean velocities experienced by the particle (m/s)
real :: hdepth !< depth from the surface at the bottom of the layer containing the particle
type(xyt), pointer :: trajectory=>null() !< Head of the linked list of recorded positions along this particle's trajectory
end type particle
!> A buffer structure for message passing
type :: buffer
integer :: size=0 !< Size of the buffer (number of particle slots in data); see increase_ibuffer
real, dimension(:,:), pointer :: data !< Packed particle fields; first dimension is presumably buffer_width — TODO confirm
end type buffer
!> A wrapper for the particle linked list (since an array of pointers is not allowed)
type :: linked_list
type(particle), pointer :: first=>null() !< Pointer to the beginning of a linked list of parts
end type linked_list
!> A grand data structure for the particles in the local MOM domain
type :: particles !; private
type(particles_gridded) :: grd !< Container with all gridded data
type(linked_list), dimension(:,:), allocatable :: list !< Linked list of particles, one list per grid cell
type(xyt), pointer :: trajectories=>null() !< A linked list for detached segments of trajectories
real :: dt !< Time-step between particle calls (s)
integer :: current_year !< Current year (years)
real :: current_yearday !< Current year-day, 1.00-365.99, (days)
integer :: traj_sample_hrs !< Period between sampling for trajectories (hours)
integer :: traj_write_hrs !< Period between writing of trajectories (hours)
integer :: verbose_hrs !< Period between terminal status reports (hours)
!>@{
!! Handles for clocks
integer :: clock, clock_mom, clock_the, clock_int, clock_cal, clock_com, clock_ini, clock_ior, clock_iow, clock_dia
integer :: clock_trw, clock_trp
!>@}
logical :: restarted=.false. !< Indicate whether we read state from a restart or not
logical :: Runge_not_Verlet=.True. !< True=Runge-Kutta, False=Verlet.
logical :: ignore_missing_restart_parts=.False. !< True allows the model to ignore particles missing in the restart.
logical :: halo_debugging=.False. !< Use for debugging halos (remove when its working)
logical :: save_short_traj=.false. !< True saves only lon,lat,time,id in particle_trajectory.nc
logical :: ignore_traj=.False. !< If true, then model does not write trajectory data at all
logical :: initial_traj=.True. !< If true, then model will write trajectory data before starting the run
logical :: use_new_predictive_corrective =.False. !< Flag to use Bob's predictive corrective particle scheme- Added by Alon
integer(kind=8) :: debug_particle_with_id = -1 !< If positive, monitors a part with this id
type(buffer), pointer :: obuffer_n=>null() !< Buffer for outgoing parts to the north
type(buffer), pointer :: ibuffer_n=>null() !< Buffer for incoming parts from the north
type(buffer), pointer :: obuffer_s=>null() !< Buffer for outgoing parts to the south
type(buffer), pointer :: ibuffer_s=>null() !< Buffer for incoming parts from the south
type(buffer), pointer :: obuffer_e=>null() !< Buffer for outgoing parts to the east
type(buffer), pointer :: ibuffer_e=>null() !< Buffer for incoming parts from the east
type(buffer), pointer :: obuffer_w=>null() !< Buffer for outgoing parts to the west
type(buffer), pointer :: ibuffer_w=>null() !< Buffer for incoming parts from the west
type(buffer), pointer :: obuffer_io=>null() !< Buffer for outgoing parts during i/o
type(buffer), pointer :: ibuffer_io=>null() !< Buffer for incoming parts during i/o
end type particles
! Version string written to stdlog at initialization; substituted by the
! build system when _FILE_VERSION is defined.
#ifdef _FILE_VERSION
character(len=128) :: version = _FILE_VERSION
#else
character(len=128) :: version = 'unknown'
#endif
!> Set a value in the buffer at position (counter,n) after incrementing counter
interface push_buffer_value
module procedure push_buffer_rvalue, push_buffer_ivalue
end interface
!> Get a value in the buffer at position (counter,n) after incrementing counter
interface pull_buffer_value
module procedure pull_buffer_rvalue, pull_buffer_ivalue
end interface
contains
! ##############################################################################
subroutine particles_framework_init(parts, Grid, Time, dt)
type(particles), pointer :: parts !< Particles to be allocated
type(ocean_grid_type), target, intent(in) :: Grid !< MOM6 grid
real, intent(in) :: dt !< Time step (s)
type(time_type), intent(in) :: Time !< Model time
! Namelist parameters (and defaults)
integer :: halo=4 ! Width of halo region
integer :: traj_sample_hrs=24 ! Period between sampling of position for trajectory storage
integer :: traj_write_hrs=24 ! Period between writing sampled trajectories to disk
integer :: verbose_hrs=24 ! Period between verbose messages
real :: Lx=360. ! Length of domain in x direction, used for periodicity (use a huge number for non-periodic)
logical :: Runge_not_Verlet=.True. ! True=Runge Kutta, False=Verlet.
logical :: grid_is_latlon=.True. ! True means that the grid is specified in lat lon, and uses to radius of the earth to convert to distance
logical :: grid_is_regular=.False. ! Flag to say whether point in cell can be found assuming regular Cartesian grid
logical :: ignore_missing_restart_parts=.False. ! True Allows the model to ignore particles missing in the restart.
logical :: halo_debugging=.False. ! Use for debugging halos (remove when its working)
logical :: save_short_traj=.false. ! True saves only lon,lat,time,id in particle_trajectory.nc
logical :: ignore_traj=.False. ! If true, then model does not traj trajectory data at all
logical :: initial_traj=.True. !< If true, then model will write trajectory data before starting the run
logical :: use_new_predictive_corrective =.False. ! Flag to use Bob's predictive corrective particle scheme- Added by Alon
logical :: do_unit_tests=.false. ! Conduct some unit tests
logical :: input_freq_distribution=.false. ! Flag to show if input distribution is freq or mass dist (=1 if input is a freq dist, =0 to use an input mass dist)
logical :: read_old_restarts=.false. ! Legacy option that does nothing
integer(kind=8) :: debug_particle_with_id = -1 ! If positive, monitors a part with this id
integer :: generate_days=-1 ! If positive, is the period in days between generation of new particles on a grid. If 0, generate once. Negative do nothing.
real :: generate_lons(3) ! Start,end and delta longitude, if generating particles
real :: generate_lats(3) ! Start,end and delta longitude, if generating particles
real :: generate_d(3) ! Start, end and delta depth if positive. Start end and delta k if negative
namelist /particles_nml/ verbose, halo, traj_sample_hrs, traj_write_hrs, save_short_traj, &
verbose_hrs, &
debug, really_debug, ignore_missing_restart_parts, &
parallel_reprod, use_slow_find, ignore_ij_restart, use_new_predictive_corrective, halo_debugging, &
fix_restart_dates, use_roundoff_fix, Runge_not_Verlet, &
restart_input_dir, old_bug_bilin,do_unit_tests, force_all_pes_traj, &
grid_is_latlon,Lx, &
grid_is_regular, &
generate_days, generate_lons, generate_lats, generate_d, &
ignore_traj, initial_traj, debug_particle_with_id, read_old_restarts
! Local variables
integer :: ierr, iunit, i, j, id_class, is, ie, js, je, np
integer :: iyr, imon, iday, ihr, imin, isec
type(particles_gridded), pointer :: grd
real :: lon_mod, big_number
logical :: lerr, lgenerate
integer :: stdlogunit, stderrunit
! Get the stderr and stdlog unit numbers
stderrunit=stderr()
stdlogunit=stdlog()
write(stdlogunit,*) "particles_framework: "//trim(version)
#ifdef INTERNAL_FILE_NML
read (input_nml_file, nml=particles_nml, iostat=ierr)
#else
iunit = open_namelist_file()
read (iunit, particles_nml,iostat=ierr)
call close_file (iunit)
#endif
ierr = check_nml_error(ierr,'particles_nml')
if (really_debug) debug=.true. ! One implies the other...
write (stdlogunit, particles_nml)
! Allocate memory
allocate(parts)
grd=>parts%grd
grd%domain => Grid%domain%mpp_domain
! Clocks
parts%clock=mpp_clock_id( 'Particles', flags=clock_flag_default, grain=CLOCK_COMPONENT )
parts%clock_com=mpp_clock_id( 'Particles-communication', flags=clock_flag_default, grain=CLOCK_SUBCOMPONENT )
parts%clock_ini=mpp_clock_id( 'Particles-initialization', flags=clock_flag_default, grain=CLOCK_SUBCOMPONENT )
parts%clock_ior=mpp_clock_id( 'Particles-I/O read', flags=clock_flag_default, grain=CLOCK_SUBCOMPONENT )
parts%clock_iow=mpp_clock_id( 'Particles-I/O write', flags=clock_flag_default, grain=CLOCK_SUBCOMPONENT )
call mpp_clock_begin(parts%clock)
call mpp_clock_begin(parts%clock_ini)
grd%isg = Grid%isg; grd%ieg = Grid%ieg
grd%jsg = Grid%jsg; grd%jeg = Grid%jeg
grd%isc = Grid%isc; grd%iec = Grid%iec
grd%jsc = Grid%jsc; grd%jec = Grid%jec
grd%isd = Grid%isd; grd%ied = Grid%ied
grd%jsd = Grid%jsd; grd%jed = Grid%jed
grd%ke = Grid%ke
grd%is_offset = Grid%idg_offset
grd%js_offset = Grid%jdg_offset
call mpp_get_neighbor_pe(grd%domain, NORTH, grd%pe_N)
call mpp_get_neighbor_pe(grd%domain, SOUTH, grd%pe_S)
call mpp_get_neighbor_pe(grd%domain, EAST, grd%pe_E)
call mpp_get_neighbor_pe(grd%domain, WEST, grd%pe_W)
folded_north_on_pe = ((Grid%Domain%y_flags == FOLD_NORTH_EDGE) .and. (grd%jec + grd%js_offset == grd%jeg))
! Allocate grid of pointers
allocate( parts%list(grd%isd:grd%ied, grd%jsd:grd%jed) )
do j = grd%jsd,grd%jed ; do i = grd%isd,grd%ied
parts%list(i,j)%first => null()
enddo ; enddo
big_number=1.0E15
allocate( grd%lon(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%lon(:,:)=big_number
allocate( grd%lat(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%lat(:,:)=big_number
! Bug fix: these two initializations previously wrote to grd%lon/grd%lat
! (copy-paste error), leaving lonc/latc uninitialized; the fill loop later
! in this routine starts at jsd+1/isd+1 so the first row/column kept garbage.
allocate( grd%lonc(grd%isd:grd%ied, grd%jsd:grd%jed) );grd%lonc(:,:)=big_number
allocate( grd%latc(grd%isd:grd%ied, grd%jsd:grd%jed) );grd%latc(:,:)=big_number
allocate( grd%dx(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%dx(:,:)=0.
allocate( grd%dy(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%dy(:,:)=0.
allocate( grd%area(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%area(:,:)=0.
allocate( grd%msk(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%msk(:,:)=0.
allocate( grd%cos(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%cos(:,:)=1.
allocate( grd%sin(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%sin(:,:)=0.
allocate( grd%ocean_depth(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%ocean_depth(:,:)=0.
allocate( grd%uo(grd%isd:grd%ied, grd%jsd:grd%jed,grd%ke) ); grd%uo(:,:,:)=0.
allocate( grd%vo(grd%isd:grd%ied, grd%jsd:grd%jed,grd%ke) ); grd%vo(:,:,:)=0.
allocate( grd%tmp(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%tmp(:,:)=0.
! tmpc is intentionally allocated on the computational (not data) domain
allocate( grd%tmpc(grd%isc:grd%iec, grd%jsc:grd%jec) ); grd%tmpc(:,:)=0.
allocate( grd%parity_x(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%parity_x(:,:)=1.
allocate( grd%parity_y(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%parity_y(:,:)=1.
allocate( grd%particle_counter_grd(grd%isd:grd%ied, grd%jsd:grd%jed) ); grd%particle_counter_grd(:,:)=0
is=grd%isc; ie=grd%iec; js=grd%jsc; je=grd%jec
grd%lon(is:ie,js:je)=Grid%geolonBu(is:ie,js:je)
grd%lat(is:ie,js:je)=Grid%geolatBu(is:ie,js:je)
grd%area(is:ie,js:je)=Grid%areaBu(is:ie,js:je) !sis2 has *(4.*pi*radius*radius)
grd%ocean_depth(is:ie,js:je) = Grid%bathyT(is:ie,js:je)
is=grd%isc; ie=grd%iec; js=grd%jsc; je=grd%jec
grd%dx(is:ie,js:je)=Grid%dxCu(is:ie,js:je)
grd%dy(is:ie,js:je)=Grid%dyBu(is:ie,js:je)
grd%msk(is:ie,js:je)=Grid%mask2dBu(is:ie,js:je)
grd%cos(is:ie,js:je)=Grid%cos_rot(is:ie,js:je)
grd%sin(is:ie,js:je)=Grid%sin_rot(is:ie,js:je)
call mpp_update_domains(grd%lon, grd%domain)
call mpp_update_domains(grd%lat, grd%domain)
call mpp_update_domains(grd%dy, grd%dx, grd%domain, gridtype=CGRID_NE, flags=SCALAR_PAIR)
call mpp_update_domains(grd%area, grd%domain)
call mpp_update_domains(grd%msk, grd%domain)
call mpp_update_domains(grd%cos, grd%domain, position=CENTER)
call mpp_update_domains(grd%sin, grd%domain, position=CENTER)
call mpp_update_domains(grd%ocean_depth, grd%domain)
call mpp_update_domains(grd%parity_x, grd%parity_y, grd%domain, gridtype=AGRID) ! If either parity_x/y is -ve, we need rotation of vectors
! Sanitize lon and lat in the southern halo
do j=grd%jsc-1,grd%jsd,-1; do i=grd%isd,grd%ied
if (grd%lon(i,j).ge.big_number) grd%lon(i,j)=grd%lon(i,j+1)
if (grd%lat(i,j).ge.big_number) grd%lat(i,j)=2.*grd%lat(i,j+1)-grd%lat(i,j+2)
enddo; enddo
! fix halos on edge of the domain
!1) South
do j=grd%jsc-1,grd%jsd,-1; do i=grd%isd,grd%ied
if (grd%lon(i,j).ge.big_number) grd%lon(i,j)=2.*grd%lon(i,j+1)-grd%lon(i,j+2)
if (grd%lat(i,j).ge.big_number) grd%lat(i,j)=2.*grd%lat(i,j+1)-grd%lat(i,j+2)
enddo; enddo
!2) North
do j=grd%jec+1,grd%jed; do i=grd%isd,grd%ied
if (grd%lon(i,j).ge.big_number) grd%lon(i,j)=2.*grd%lon(i,j-1)-grd%lon(i,j-2)
if (grd%lat(i,j).ge.big_number) grd%lat(i,j)=2.*grd%lat(i,j-1)-grd%lat(i,j-2)
enddo; enddo
!3) West
do i=grd%isc-1,grd%isd,-1; do j=grd%jsd,grd%jed
if (grd%lon(i,j).ge.big_number) grd%lon(i,j)=2.*grd%lon(i+1,j)-grd%lon(i+2,j)
if (grd%lat(i,j).ge.big_number) grd%lat(i,j)=2.*grd%lat(i+1,j)-grd%lat(i+2,j)
enddo; enddo
!4) East
do i=grd%iec+1,grd%ied; do j=grd%jsd,grd%jed
if (grd%lon(i,j).ge.big_number) grd%lon(i,j)=2.*grd%lon(i-1,j)-grd%lon(i-2,j)
if (grd%lat(i,j).ge.big_number) grd%lat(i,j)=2.*grd%lat(i-1,j)-grd%lat(i-2,j)
enddo; enddo
if ((Lx.gt.1E15 ) .and. (mpp_pe().eq.mpp_root_pe())) then
call error_mesg('particles, framework', 'Model does not enjoy the domain being larger than 1E15. Not sure why. Probably to do with floating point precision.', WARNING)
endif
if ((.not. grid_is_latlon) .and. (Lx.eq.360.)) then
if (mpp_pe().eq.mpp_root_pe()) then
call error_mesg('particles, framework', 'Since the lat/lon grid is off, the x-direction is being set as non-periodic. Set Lx not equal to 360 override.', WARNING)
endif
Lx=-1.
endif
!The fix to reproduce across PE layout change, from AJA
if (Lx>0.) then
j=grd%jsc; do i=grd%isc+1,grd%ied
lon_mod = apply_modulo_around_point(grd%lon(i,j),grd%lon(i-1,j),Lx)
if (abs(grd%lon(i,j)-lon_mod)>(Lx/2.)) &
grd%lon(i,j)= lon_mod
enddo
j=grd%jsc; do i=grd%isc-1,grd%isd,-1
lon_mod = apply_modulo_around_point(grd%lon(i,j),grd%lon(i+1,j) ,Lx)
if (abs(grd%lon(i,j)- lon_mod )>(Lx/2.)) &
grd%lon(i,j)= lon_mod
enddo
do j=grd%jsc+1,grd%jed; do i=grd%isd,grd%ied
lon_mod = apply_modulo_around_point(grd%lon(i,j),grd%lon(i,j-1) ,Lx)
if (abs(grd%lon(i,j)-(lon_mod ))>(Lx/2.)) &
grd%lon(i,j)= lon_mod
enddo; enddo
do j=grd%jsc-1,grd%jsd,-1; do i=grd%isd,grd%ied
lon_mod = apply_modulo_around_point(grd%lon(i,j),grd%lon(i,j+1) ,Lx)
if (abs(grd%lon(i,j)- lon_mod )>(Lx/2.)) &
grd%lon(i,j)= lon_mod
enddo; enddo
endif
is=grd%isd; ie=grd%ied; js=grd%jsd; je=grd%jed
grd%lon(is:ie,js:je)=Grid%geolonBu(is:ie,js:je)
grd%lat(is:ie,js:je)=Grid%geolatBu(is:ie,js:je)
! WE SHOULD JUST COPY geolonC,geolatC instead (MJH)
! lonc, latc used for searches
do j=grd%jsd+1,grd%jed; do i=grd%isd+1,grd%ied
grd%lonc(i,j)=0.25*( (grd%lon(i,j)+grd%lon(i-1,j-1)) &
+(grd%lon(i-1,j)+grd%lon(i,j-1)) )
grd%latc(i,j)=0.25*( (grd%lat(i,j)+grd%lat(i-1,j-1)) &
+(grd%lat(i-1,j)+grd%lat(i,j-1)) )
enddo; enddo
if (debug) then
write(stderrunit,'(a,i3,a,4i4,a,4f8.2)') 'particles, particles_init: (',mpp_pe(),') [ij][se]c=', &
grd%isc,grd%iec,grd%jsc,grd%jec, &
' [lon|lat][min|max]=', minval(grd%lon),maxval(grd%lon),minval(grd%lat),maxval(grd%lat)
endif
! Final check for NaN's in the latlon grid:
do j=grd%jsd+1,grd%jed; do i=grd%isd+1,grd%ied
if (grd%lat(i,j) .ne. grd%lat(i,j)) then
write(stderrunit,*) 'Lat not defined properly', mpp_pe(),i,j,grd%lat(i,j)
call error_mesg('particles,grid defining', 'Latitude contains NaNs', FATAL)
endif
if (grd%lon(i,j) .ne. grd%lon(i,j)) then
write(stderrunit,*) 'Lon not defined properly', mpp_pe(),i,j,grd%lon(i,j)
call error_mesg('particles, grid defining', 'Longatudes contains NaNs', FATAL)
endif
enddo; enddo
if (ignore_traj) buffer_width_traj=0 ! If this is true, then all traj files should be ignored
! Parameters
parts%dt=dt
parts%traj_sample_hrs=traj_sample_hrs
parts%traj_write_hrs=traj_write_hrs
parts%save_short_traj=save_short_traj
parts%ignore_traj=ignore_traj
parts%initial_traj=initial_traj
parts%verbose_hrs=verbose_hrs
parts%grd%halo=halo
parts%grd%Lx=Lx
parts%grd%grid_is_latlon=grid_is_latlon
parts%grd%grid_is_regular=grid_is_regular
parts%Runge_not_Verlet=Runge_not_Verlet
parts%ignore_missing_restart_parts=ignore_missing_restart_parts
parts%use_new_predictive_corrective=use_new_predictive_corrective !Alon
parts%debug_particle_with_id=debug_particle_with_id
if (do_unit_tests) then
if (unit_tests(parts)) call error_mesg('particles, particles_init', 'Unit tests failed!', FATAL)
endif
! Generate a grid of particles if requested
call get_date(Time, iyr, imon, iday, ihr, imin, isec)
if (3600*ihr + 60*imin +isec == 0) then ! Make sure we are on a day boundary
lgenerate = .false.
if (generate_days>0) then
if ( mod(365*(iyr-1)+iday-1, generate_days)==0 ) lgenerate = .true.
elseif (generate_days==0) then ! .and. 365*(iyr-1)+iday-1==0) then
lgenerate = .true.
endif
if (lgenerate) then
call generate_grid_of_particles(parts, &
generate_lons(1), generate_lons(2), generate_lons(3), &
generate_lats(1), generate_lats(2), generate_lats(3), &
generate_d(1), generate_d(2), generate_d(3))
endif
endif
call mpp_clock_end(parts%clock_ini)
call mpp_clock_end(parts%clock)
end subroutine particles_framework_init
!> Generate particles on a grid
!> Seed particles on a regular lon/lat/depth grid and insert them into the cell lists.
!!
!! The sign of d_end selects the vertical coordinate: negative values are
!! interpreted as model-level (k-space) indices, positive values as depths.
!! The sequential counter is advanced on every PE for every candidate point,
!! so drifter numbers are globally consistent across the PE layout.
subroutine generate_grid_of_particles(parts, lon_start, lon_end, dlon, lat_start, lat_end, dlat,d_start,d_end,dd)
! Arguments
type(particles), pointer :: parts !< Container for all types and memory
real, intent(in) :: lon_start !< Start longitude of grid of particles
real, intent(in) :: lon_end !< End longitude of grid of particles
real, intent(in) :: dlon !< Separation longitude of particles on grid
real, intent(in) :: lat_start !< Start latitude of grid of particles
real, intent(in) :: lat_end !< End latitude of grid of particles
real, intent(in) :: dlat !< Separation latitude of particles on grid
real, intent(in) :: d_start !< Start depth of grid of particles
real, intent(in) :: d_end !< End depth of grid of particles
real, intent(in) :: dd !< Separation depth of particles on grid
! Local variables
type(particles_gridded), pointer :: grd => null()
type(particle) :: newpart
integer :: ii, jj, kk, ni, nj, nk
integer :: seq_num
real :: lat_lo, lat_hi
logical :: found
grd=>parts%grd
! Local latitude extent of this PE, snapped onto the generating grid.
! NOTE(review): kept for a (currently disabled) early-out on the j loop.
lat_lo = minval( grd%lat(grd%isc-1:grd%iec,grd%jsc-1:grd%jec) )
lat_hi = maxval( grd%lat(grd%isc-1:grd%iec,grd%jsc-1:grd%jec) )
lat_lo = max( int((lat_lo - lat_start)/dlat)*dlat + lat_start, lat_start)
lat_hi = min( int((lat_hi - lat_start)/dlat)*dlat + lat_start, lat_end)
! Number of intervals (rounded to nearest) in each direction
ni = int( (lon_end-lon_start)/dlon + 0.5 )
nj = int( (lat_end-lat_start)/dlat + 0.5 )
nk = int( (abs(d_end)-abs(d_start))/abs(dd)+0.5 )
seq_num = 0
do kk = 0,nk
! Choose vertical coordinate from the sign of d_end (see header comment)
if (d_end<0) then
newpart%k_space=.true.
newpart%k=abs(d_start) + abs(dd*float(kk))
elseif (d_end>0) then
newpart%k_space=.false.
newpart%depth=abs(d_start) + abs(dd*float(kk))
endif
do jj = 0,nj
newpart%lat = lat_start + dlat*float(jj)
do ii = 0,ni
newpart%lon = lon_start + dlon*float(ii)
found=find_cell(grd, newpart%lon, newpart%lat, newpart%ine, newpart%jne)
! Count every candidate point (found or not) to keep numbering global
seq_num=seq_num+1
if (found) then
if (grd%msk(newpart%ine,newpart%jne)>-1.) then
newpart%drifter_num = seq_num
newpart%id = generate_id(grd, newpart%ine, newpart%jne)
found=pos_within_cell(grd, newpart%lon, newpart%lat,newpart%ine,newpart%jne, newpart%xi, newpart%yj)
call add_new_part_to_list(parts%list(newpart%ine,newpart%jne)%first, newpart)
endif
endif
enddo
enddo
enddo
call parts_chksum(parts, 'after generated particles')
end subroutine generate_grid_of_particles
! ##############################################################################
!> Adjust part dates to allow use of restarts from later dates
!> Adjust part dates to allow use of restarts from later dates.
!!
!! Finds the newest particle creation date across all PEs; if any particle
!! was created after the current model date, all particle start years are
!! shifted back by a whole number of years so no creation date is in the future.
subroutine offset_part_dates(parts,Time)
! Arguments
type(particles), pointer :: parts !< Container for all types and memory
type(time_type), intent(in) :: Time !< Model time
! Local variables
type(particle), pointer :: p
integer :: iyr, imon, iday, ihr, imin, isec, yr_offset
real :: newest_start, this_start
real :: now_val
integer :: i, j
call get_date(Time, iyr, imon, iday, ihr, imin, isec)
! Start far in the past so any real particle date supersedes it
newest_start=iyr-999999.
! Pass 1: newest creation date on this PE (year + fractional day; /367.
! keeps the day fraction strictly below one year)
do j = parts%grd%jsc,parts%grd%jec ; do i = parts%grd%isc,parts%grd%iec
p=>parts%list(i,j)%first
do while (associated(p))
this_start=float(p%start_year)+p%start_day/367.
if (this_start>newest_start) newest_start=this_start
p=>p%next
enddo
enddo ; enddo
call mpp_max(newest_start)
now_val=float(iyr)+yearday(iyr, imon, iday, ihr, imin, isec)/367.
if (newest_start<=now_val) return ! No particle dates lie in the future
yr_offset=int(newest_start+1.)-iyr
if (mpp_pe().eq.mpp_root_pe()) write(*,'(a,i8,a)') &
'particles: parts found with creation dates after model date! Adjusting part dates by ',yr_offset,' years'
call parts_chksum(parts, 'before adjusting start dates')
! Pass 2: shift every particle's start year back by the computed offset
do j = parts%grd%jsc,parts%grd%jec ; do i = parts%grd%isc,parts%grd%iec
p=>parts%list(i,j)%first
do while (associated(p))
p%start_year=p%start_year-yr_offset
p=>p%next
enddo
enddo ; enddo
call parts_chksum(parts, 'after adjusting start dates')
end subroutine offset_part_dates
! ###############################################################################################
!> Moves particles between lists if they have moved from cell to cell
!> Moves particles between lists if they have moved from cell to cell.
!!
!! Walks every cell list on the data domain (halos included) and, for any
!! particle whose target indices (ine,jne) no longer match the list it sits
!! in, unlinks it from that list and inserts it into the list of its new cell.
subroutine move_part_between_cells(parts)
! Arguments
type(particles), pointer :: parts !< Container for all types and memory
! Local variables
type(particles_gridded), pointer :: grd => null()
type(particle), pointer :: moving_part => null(), this => null()
integer :: grdi, grdj
! (removed unused local `quick`)
! For convenience
grd=>parts%grd
do grdj = grd%jsd,grd%jed ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
if ((this%ine.ne.grdi) .or. (this%jne.ne.grdj)) then
moving_part=>this
! Advance the cursor before relinking so traversal of the old list
! continues safely past the removed node
this=>this%next
! Removing the particle from the old list
if (associated(moving_part%prev)) then
moving_part%prev%next=>moving_part%next
else
! Particle was the head of the old list
parts%list(grdi,grdj)%first=>moving_part%next
endif
if (associated(moving_part%next)) moving_part%next%prev=>moving_part%prev
! Inserting the particle into the new list
call insert_part_into_list(parts%list(moving_part%ine,moving_part%jne)%first,moving_part)
! Clear moving_part (drop our alias to the relocated particle)
moving_part=>null()
else
this=>this%next
endif
enddo
enddo ; enddo
end subroutine move_part_between_cells
! #############################################################################
!> Populates the halo lists with parts from neighbor processers
subroutine update_halo_particles(parts)
! Arguments
type(particles), pointer :: parts !< Container for all types and memory
! Local variables
type(particle), pointer :: kick_the_bucket, this
integer :: nparts_to_send_e, nparts_to_send_w
integer :: nparts_to_send_n, nparts_to_send_s
integer :: nparts_rcvd_from_e, nparts_rcvd_from_w
integer :: nparts_rcvd_from_n, nparts_rcvd_from_s
type(particles_gridded), pointer :: grd
integer :: i, nparts_start, nparts_end
integer :: stderrunit
integer :: grdi, grdj
integer :: halo_width
integer :: temp1, temp2
real :: current_halo_status
logical :: halo_debugging
halo_width=parts%grd%halo
halo_debugging=parts%halo_debugging
! Get the stderr unit number
stderrunit = stderr()
! For convenience
grd=>parts%grd
! For debugging, MP1
if (halo_debugging) then
do grdj = grd%jsd,grd%jed ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
write(stderrunit,*) 'A', this%id, mpp_pe(), this%halo_part, grdi, grdj
this=>this%next
enddo
enddo; enddo
! Use when debugging:
endif
! Step 1: Clear the current halos
call mpp_sync_self()
do grdj = grd%jsd,grd%jsc-1 ; do grdi = grd%isd,grd%ied
call delete_all_parts_in_list(parts, grdj, grdi)
enddo ; enddo
do grdj = grd%jec+1,grd%jed ; do grdi = grd%isd,grd%ied
call delete_all_parts_in_list(parts,grdj,grdi)
enddo ; enddo
do grdj = grd%jsd,grd%jed ; do grdi = grd%isd,grd%isc-1
call delete_all_parts_in_list(parts,grdj,grdi)
enddo ; enddo
do grdj = grd%jsd,grd%jed ; do grdi = grd%iec+1,grd%ied
call delete_all_parts_in_list(parts,grdj,grdi)
enddo ; enddo
call mpp_sync_self()
! For debugging
if (halo_debugging) then
do grdj = grd%jsd,grd%jed ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
write(stderrunit,*) 'B', this%id, mpp_pe(), this%halo_part, grdi, grdj
this=>this%next
enddo
enddo; enddo
endif
if (debug) then
nparts_start=count_parts(parts)
endif
call mpp_sync_self()
! Step 2: Updating the halos - This code is mostly copied from send_to_other_pes
! Find number of parts that headed east/west
nparts_to_send_e=0
nparts_to_send_w=0
! parts on eastern side of the processor
do grdj = grd%jsc,grd%jec ; do grdi = grd%iec-halo_width+2,grd%iec
this=>parts%list(grdi,grdj)%first
do while (associated(this))
kick_the_bucket=>this
this=>this%next
nparts_to_send_e=nparts_to_send_e+1
current_halo_status=kick_the_bucket%halo_part
kick_the_bucket%halo_part=1.
call pack_part_into_buffer2(kick_the_bucket, parts%obuffer_e, nparts_to_send_e)
kick_the_bucket%halo_part=current_halo_status
enddo
enddo; enddo
! parts on the western side of the processor
do grdj = grd%jsc,grd%jec ; do grdi = grd%isc,grd%isc+halo_width-1
this=>parts%list(grdi,grdj)%first
do while (associated(this))
kick_the_bucket=>this
this=>this%next
nparts_to_send_w=nparts_to_send_w+1
current_halo_status=kick_the_bucket%halo_part
kick_the_bucket%halo_part=1.
call pack_part_into_buffer2(kick_the_bucket, parts%obuffer_w, nparts_to_send_w)
kick_the_bucket%halo_part=current_halo_status
enddo
enddo; enddo
! Send parts east
if (grd%pe_E.ne.NULL_PE) then
call mpp_send(nparts_to_send_e, plen=1, to_pe=grd%pe_E, tag=COMM_TAG_1)
if (nparts_to_send_e.gt.0) then
call mpp_send(parts%obuffer_e%data, nparts_to_send_e*buffer_width, grd%pe_E, tag=COMM_TAG_2)
endif
endif
! Send parts west
if (grd%pe_W.ne.NULL_PE) then
call mpp_send(nparts_to_send_w, plen=1, to_pe=grd%pe_W, tag=COMM_TAG_3)
if (nparts_to_send_w.gt.0) then
call mpp_send(parts%obuffer_w%data, nparts_to_send_w*buffer_width, grd%pe_W, tag=COMM_TAG_4)
endif
endif
! Receive parts from west
if (grd%pe_W.ne.NULL_PE) then
nparts_rcvd_from_w=-999
call mpp_recv(nparts_rcvd_from_w, glen=1, from_pe=grd%pe_W, tag=COMM_TAG_1)
if (nparts_rcvd_from_w.lt.0) then
write(stderrunit,*) 'pe=',mpp_pe(),' received a bad number',nparts_rcvd_from_w,' from',grd%pe_W,' (W) !!!!!!!!!!!!!!!!!!!!!!'
endif
if (nparts_rcvd_from_w.gt.0) then
call increase_ibuffer(parts%ibuffer_w, nparts_rcvd_from_w,buffer_width)
call mpp_recv(parts%ibuffer_w%data, nparts_rcvd_from_w*buffer_width, grd%pe_W, tag=COMM_TAG_2)
do i=1, nparts_rcvd_from_w
call unpack_part_from_buffer2(parts, parts%ibuffer_w, i, grd )
enddo
endif
else
nparts_rcvd_from_w=0
endif
! Receive parts from east
if (grd%pe_E.ne.NULL_PE) then
nparts_rcvd_from_e=-999
call mpp_recv(nparts_rcvd_from_e, glen=1, from_pe=grd%pe_E, tag=COMM_TAG_3)
if (nparts_rcvd_from_e.lt.0) then
write(stderrunit,*) 'pe=',mpp_pe(),' received a bad number',nparts_rcvd_from_e,' from',grd%pe_E,' (E) !!!!!!!!!!!!!!!!!!!!!!'
endif
if (nparts_rcvd_from_e.gt.0) then
call increase_ibuffer(parts%ibuffer_e, nparts_rcvd_from_e,buffer_width)
call mpp_recv(parts%ibuffer_e%data, nparts_rcvd_from_e*buffer_width, grd%pe_E, tag=COMM_TAG_4)
do i=1, nparts_rcvd_from_e
call unpack_part_from_buffer2(parts, parts%ibuffer_e, i, grd )
enddo
endif
else
nparts_rcvd_from_e=0
endif
! Find number of parts that headed north/south
nparts_to_send_n=0
nparts_to_send_s=0
! parts on north side of the processor
do grdj = grd%jec-halo_width+2,grd%jec ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
kick_the_bucket=>this
this=>this%next
nparts_to_send_n=nparts_to_send_n+1
current_halo_status=kick_the_bucket%halo_part
kick_the_bucket%halo_part=1.
call pack_part_into_buffer2(kick_the_bucket, parts%obuffer_n, nparts_to_send_n )
kick_the_bucket%halo_part=current_halo_status
enddo
enddo; enddo
! parts on south side of the processor
do grdj = grd%jsc,grd%jsc+halo_width-1 ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
kick_the_bucket=>this
this=>this%next
nparts_to_send_s=nparts_to_send_s+1
current_halo_status=kick_the_bucket%halo_part
kick_the_bucket%halo_part=1.
call pack_part_into_buffer2(kick_the_bucket, parts%obuffer_s, nparts_to_send_s )
kick_the_bucket%halo_part=current_halo_status
enddo
enddo; enddo
! Send parts north
if (grd%pe_N.ne.NULL_PE) then
if(folded_north_on_pe) then
call mpp_send(nparts_to_send_n, plen=1, to_pe=grd%pe_N, tag=COMM_TAG_9)
else
call mpp_send(nparts_to_send_n, plen=1, to_pe=grd%pe_N, tag=COMM_TAG_5)
endif
if (nparts_to_send_n.gt.0) then
if(folded_north_on_pe) then
call mpp_send(parts%obuffer_n%data, nparts_to_send_n*buffer_width, grd%pe_N, tag=COMM_TAG_10)
else
call mpp_send(parts%obuffer_n%data, nparts_to_send_n*buffer_width, grd%pe_N, tag=COMM_TAG_6)
endif
endif
endif
! Send parts south
if (grd%pe_S.ne.NULL_PE) then
call mpp_send(nparts_to_send_s, plen=1, to_pe=grd%pe_S, tag=COMM_TAG_7)
if (nparts_to_send_s.gt.0) then
call mpp_send(parts%obuffer_s%data, nparts_to_send_s*buffer_width, grd%pe_S, tag=COMM_TAG_8)
endif
endif
! Receive parts from south
if (grd%pe_S.ne.NULL_PE) then
nparts_rcvd_from_s=-999
call mpp_recv(nparts_rcvd_from_s, glen=1, from_pe=grd%pe_S, tag=COMM_TAG_5)
if (nparts_rcvd_from_s.lt.0) then
write(stderrunit,*) 'pe=',mpp_pe(),' received a bad number',nparts_rcvd_from_s,' from',grd%pe_S,' (S) !!!!!!!!!!!!!!!!!!!!!!'
endif
if (nparts_rcvd_from_s.gt.0) then
call increase_ibuffer(parts%ibuffer_s, nparts_rcvd_from_s,buffer_width)
call mpp_recv(parts%ibuffer_s%data, nparts_rcvd_from_s*buffer_width, grd%pe_S, tag=COMM_TAG_6)
do i=1, nparts_rcvd_from_s
call unpack_part_from_buffer2(parts, parts%ibuffer_s, i, grd )
enddo
endif
else
nparts_rcvd_from_s=0
endif
! Receive parts from north
if (grd%pe_N.ne.NULL_PE) then
nparts_rcvd_from_n=-999
if(folded_north_on_pe) then
call mpp_recv(nparts_rcvd_from_n, glen=1, from_pe=grd%pe_N, tag=COMM_TAG_9)
else
call mpp_recv(nparts_rcvd_from_n, glen=1, from_pe=grd%pe_N, tag=COMM_TAG_7)
endif
if (nparts_rcvd_from_n.lt.0) then
write(stderrunit,*) 'pe=',mpp_pe(),' received a bad number',nparts_rcvd_from_n,' from',grd%pe_N,' (N) !!!!!!!!!!!!!!!!!!!!!!'
endif
if (nparts_rcvd_from_n.gt.0) then
call increase_ibuffer(parts%ibuffer_n, nparts_rcvd_from_n,buffer_width)
if(folded_north_on_pe) then
call mpp_recv(parts%ibuffer_n%data, nparts_rcvd_from_n*buffer_width, grd%pe_N, tag=COMM_TAG_10)
else
call mpp_recv(parts%ibuffer_n%data, nparts_rcvd_from_n*buffer_width, grd%pe_N, tag=COMM_TAG_8)
endif
do i=1, nparts_rcvd_from_n
call unpack_part_from_buffer2(parts, parts%ibuffer_n, i, grd )
enddo
endif
else
nparts_rcvd_from_n=0
endif
! For debugging
if (halo_debugging) then
call mpp_sync_self()
do grdj = grd%jsd,grd%jed ; do grdi = grd%isd,grd%ied
this=>parts%list(grdi,grdj)%first
do while (associated(this))
write(stderrunit,*) 'C', this%id, mpp_pe(), this%halo_part, grdi, grdj
this=>this%next
enddo
enddo; enddo
endif
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!Debugging!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
if (debug) then
nparts_end=count_parts(parts)
i=nparts_rcvd_from_n+nparts_rcvd_from_s+nparts_rcvd_from_e+nparts_rcvd_from_w &
-nparts_to_send_n-nparts_to_send_s-nparts_to_send_e-nparts_to_send_w
if (nparts_end-(nparts_start+i).ne.0) then
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_end=',nparts_end,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_start=',nparts_start,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: delta=',i,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: error=',nparts_end-(nparts_start+i),' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_to_send_n=',nparts_to_send_n,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_to_send_s=',nparts_to_send_s,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_to_send_e=',nparts_to_send_e,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_to_send_w=',nparts_to_send_w,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_rcvd_from_n=',nparts_rcvd_from_n,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_rcvd_from_s=',nparts_rcvd_from_s,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_rcvd_from_e=',nparts_rcvd_from_e,' on PE',mpp_pe()
write(stderrunit,'(a,i4,a,i4)') 'particles, update_halos: nparts_rcvd_from_w=',nparts_rcvd_from_w,' on PE',mpp_pe()
endif
endif
if (debug) then
i=0
do grdj = grd%jsc,grd%jec ; do grdi = grd%isc,grd%iec
this=>parts%list(grdi,grdj)%first
do while (associated(this))
call check_position(grd, this, 'exchange (bot)')
if (this%ine.lt.parts%grd%isc .or. &