UPP V11.0.0
WRFPOST.f
      PROGRAM wrfpost

!
!
!============================================================================================================
!
! This is an MPI code. All array indexing is with respect to the global indices. Loop indices
! look as follows for N MPI tasks.
!
!
!
!    Original                                            New
!    Index                                               Index
!
!     JM ----------------------------------------------- JEND
!   JM-1 -                                             - JEND_M
!   JM-2 -                MPI TASK N-1                 - JEND_M2
!        -                                             -
!        -                                             -
!        ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!        ----------------------------------------------- JEND, JEND_M, JEND_M2
!        -                                             -
!        -                MPI TASK N-2                 -
!        -                                             -
!        -                                             -
!        ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!
!                              .
!                              .
!                              .
!
!        ----------------------------------------------- JEND, JEND_M, JEND_M2
!        -                                             -
!        -                MPI TASK 1                   -
!        -                                             -
!        -                                             -
!        ----------------------------------------------- JSTA, JSTA_M, JSTA_M2
!        ----------------------------------------------- JEND, JEND_M, JEND_M2
!        -                                             -
!        -                MPI TASK 0                   -
!      3 -                                             - JSTA_M2
!      2 -                                             - JSTA_M
!      1 ----------------------------------------------- JSTA
!
!        1                                             IM
!
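! An illustrative sketch (not from the original source) of how such a 1-D
! block decomposition over jm rows could be computed for task me out of
! num_procs tasks; the names mirror the module variables, but the actual
! bounds used by the post are set in MPI_FIRST:
!
!      chunk = jm / num_procs
!      rem   = mod(jm, num_procs)
!      jsta  = me*chunk + min(me, rem) + 1
!      jend  = jsta + chunk - 1
!      if (me < rem) jend = jend + 1   ! first rem tasks take one extra row
!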
!     Jim Tuccillo
!     Jan 2000
!
!     README - Jim Tuccillo Feb 2001
!
! Many common blocks have been replaced by modules to support Fortran
! "allocate" commands. Many of the 3-D arrays are now allocated to be the
! exact size required based on the number of MPI tasks. The dimensioning will be
!   x ( im,jsta_2l:jend_2u,lm)
! Most 2-D arrays continue to be dimensioned (im,jm). This is fine but please be aware
! that the EXCH routine for arrays dimensioned (im,jm) is different from the one for
! arrays dimensioned (im,jsta_2l:jend_2u). Also, be careful about passing any arrays
! dimensioned (im,jsta_2l:jend_2u,lm). See examples in the code for the correct calling
! sequence and EXCH routine to use.
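!
! A minimal sketch (illustrative, not part of the build) of the halo
! dimensioning described above, assuming a 2-row halo in j and a per-level
! exchange; the exact EXCH call shape here is an assumption:
!
!      real, allocatable :: x(:,:,:)
!      allocate(x(im,jsta_2l:jend_2u,lm))  ! jsta_2l/jend_2u extend jsta/jend by the halo
!      do l=1,lm
!        call exch(x(1,jsta_2l,l))         ! exchange one level's halo rows
!      end do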
!
!
! ASYNCHRONOUS I/O HAS BEEN ADDED. THE LAST MPI TASK DOES THE I/O. IF THERE IS
! ONLY ONE MPI TASK THEN TASK 0 DOES THE I/O.
! THE CODE HAS GOTTEN A LITTLE KLUDGY. BASICALLY, IM, IMX AND IMOUT MUST BE EQUAL
! AND REPRESENT THE VALUE USED IN THE MODEL. THE SAME HOLDS FOR JM, JMX AND JMOUT.
!
! Jim Tuccillo June 2001
!
!
!===========================================================================================
!
      use netcdf
      use nemsio_module, only: nemsio_getheadvar, nemsio_gfile, nemsio_init, nemsio_open, &
                               nemsio_getfilehead, nemsio_close
      use ctlblk_mod, only: filenameaer, me, num_procs, num_servers, mpi_comm_comp, datestr, &
              mpi_comm_inter, filename, ioform, grib, idat, filenameflux, filenamed3d, gdsdegr, &
              spldef, modelname, ihrst, lsmdef, vtimeunits, tprec, pthresh, datahandle, im, jm, lm, &
              lp1, lm1, im_jm, isf_surface_physics, nsoil, spl, lsmp1, global, imp_physics, &
              ista, iend, ista_m, iend_m, ista_2l, iend_2u, &
              jsta, jend, jsta_m, jend_m, jsta_2l, jend_2u, novegtype, icount_calmict, npset, datapd, &
              lsm, fld_info, etafld2_tim, eta2p_tim, mdl2sigma_tim, cldrad_tim, miscln_tim, &
              mdl2agl_tim, mdl2std_tim, mdl2thandpv_tim, calrad_wcloud_tim, &
              fixed_tim, time_output, imin, surfce2_tim, komax, ivegsrc, d3d_on, gocart_on, rdaod, &
              readxml_tim, spval, fullmodelname, submodelname, hyb_sigp, filenameflat, aqfcmaq_on, numx
      use grib2_module, only: gribit2, num_pset, nrecout, first_grbtbl, grib_info_finalize
!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      implicit none
!
      type(nemsio_gfile) :: nfile,ffile,rfile
      include "mpif.h"
!
!     DECLARE VARIABLES.
!
!     SET HEADER WRITER FLAGS TO TRUE.
!
!temporary vars
!
      real(kind=8) :: time_initpost=0., initpost_tim=0., btim, bbtim
      real rinc(5), untcnvt
      integer :: status=0, iostatusd3d=0, iostatusflux=0
      integer i,j,iii,l,k,ierr,nrec,ist,lusig,idrt,ncid3d,ncid2d,varid
      integer :: prntsec,iim,jjm,llm,ioutcount,itmp,iret,iunit, &
                 iunitd3d,iyear,imn,iday,lcntrl,ieof
      integer :: iostatusaer
      logical :: popascal
!
      integer :: kpo,kth,kpv
      real,dimension(komax) :: po,th,pv
      namelist/nampgb/kpo,po,kth,th,kpv,pv,filenameaer,d3d_on,gocart_on,popascal &
                     ,hyb_sigp,rdaod,aqfcmaq_on,vtimeunits,numx
      integer :: itag_ierr
      namelist/model_inputs/filename,ioform,grib,datestr,modelname,submodelname &
                           ,filenameflux,filenameflat

      character startdate*19,sysdepinfo*80,iowrfname*3,post_fname*255
      character cgar*1,cdum*4,line*10
!
!------------------------------------------------------------------------------
!     START HERE
!
      call start()
!
!     INITIALIZE MPI

      CALL setup_servers(me,            &
                         num_procs,     &
                         num_servers,   &
                         mpi_comm_comp, &
                         mpi_comm_inter)
!
!     ME IS THE RANK
!     NUM_PROCS IS THE NUMBER OF TASKS DOING THE POSTING
!     NUM_SERVERS IS ONE IF THERE IS MORE THAN ONE MPI TASK IN TOTAL, OTHERWISE ZERO
!     MPI_COMM_COMP IS THE INTRACOMMUNICATOR
!     MPI_COMM_INTER IS THE INTERCOMMUNICATOR FOR COMMUNICATION BETWEEN TASK 0 OF THE
!     TASKS DOING THE POSTING AND THE I/O SERVER
!
!
!     IF WE HAVE MORE THAN 1 MPI TASK THEN WE WILL FIRE UP THE IO SERVER
!     THE LAST TASK ( IN THE CONTEXT OF MPI_COMM_WORLD ) IS THE I/O SERVER
!
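! For illustration only (the real split is done inside SETUP_SERVERS, whose
! details are not shown here): peeling the last rank off MPI_COMM_WORLD into
! a server group could look like
!
!      call mpi_comm_size(mpi_comm_world, ntasks, ierr)
!      call mpi_comm_rank(mpi_comm_world, rank, ierr)
!      color = 0                                          ! compute group
!      if (rank == ntasks-1 .and. ntasks > 1) color = 1   ! I/O server
!      call mpi_comm_split(mpi_comm_world, color, rank, mpi_comm_comp, ierr)
!
! with MPI_COMM_INTER then built (e.g. via mpi_intercomm_create) to link the
! two groups.
!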
      print*,'ME,NUM_PROCS,NUM_SERVERS=',me,num_procs,num_servers

      if (me == 0) CALL w3tagb('nems ',0000,0000,0000,'np23 ')

      if ( me >= num_procs ) then
!
        call server
!
      else
        spval = 9.9e10
!
!**************************************************************************
!KaYee: Read itag in Fortran Namelist format
!Set defaults
        submodelname = 'NONE'
        numx         = 1
!open namelist
        open(5,file='itag')
        read(5,nml=model_inputs,iostat=itag_ierr,err=888)
!       print*,'itag_ierr=',itag_ierr
  888   if (itag_ierr /= 0) then
          print*,'Incorrect namelist variable(s) found in the itag file, stopping!'
          stop
        endif
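! For reference, a hypothetical itag file in this namelist format (all
! values are examples only, not defaults):
!
!      &model_inputs
!        filename='dynf000.nc', ioform='netcdfpara', grib='grib2',
!        datestr='2021-03-15_12:00:00', modelname='GFS'
!      /
!      &nampgb
!        numx=2
!      /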

        if (me==0) print*,'fileName= ',filename
        if (me==0) print*,'IOFORM= ',ioform
!       if (me==0) print*,'OUTFORM= ',grib
        if (me==0) print*,'OUTFORM= ',grib
        if (me==0) print*,'DateStr= ',datestr
        if (me==0) print*,'MODELNAME= ',modelname
        if (me==0) print*,'SUBMODELNAME= ',submodelname
        if (me==0) print*,'numx= ',numx
!       if(MODELNAME == 'NMM')then
!         read(5,1114) VTIMEUNITS
! 1114    format(a4)
!         if (me==0) print*,'VALID TIME UNITS = ', VTIMEUNITS
!       endif
!
  303   format('MODELNAME="',a,'" SUBMODELNAME="',a,'"')

        write(0,*)'MODELNAME: ', modelname, submodelname

        if (me==0) print 303,modelname,submodelname
! assume for now that the first date in the stdin file is the start date
        read(datestr,300) iyear,imn,iday,ihrst,imin
        if (me==0) write(*,*) 'in WRFPOST iyear,imn,iday,ihrst,imin', &
                              iyear,imn,iday,ihrst,imin
  300   format(i4,1x,i2,1x,i2,1x,i2,1x,i2)

        idat(1) = imn
        idat(2) = iday
        idat(3) = iyear
        idat(4) = ihrst
        idat(5) = imin
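! Worked example (illustrative): datestr = '2021-03-15_12:00:00' parsed with
! format 300 gives iyear=2021, imn=3, iday=15, ihrst=12, imin=0, so
! idat = (/3, 15, 2021, 12, 0/).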

  111   format(a256)
  112   format(a19)
  113   format(a20)
  114   format(a8)
  120   format(a5)
  121   format(a4)

!KaYee: Read in GFS/FV3 runs in Fortran Namelist Format.
        if (me==0) print*,'MODELNAME= ',modelname,'grib=',grib
        if(modelname == 'GFS' .OR. modelname == 'FV3R') then
          if (me == 0) print*,'first two file names in GFS or FV3= ' &
                             ,trim(filename),trim(filenameflux)
        end if

        if(grib=='grib2') then
          gdsdegr = 1.d6
        endif
        if (me==0) print *,'gdsdegr=',gdsdegr
!
! set defaults for kpo, kth, th, kpv, pv
        kpo = 0
        po  = 0
        kth = 6
        th  = (/310.,320.,350.,450.,550.,650.,(0.,k=kth+1,komax)/) ! isentropic levels to output
        kpv = 8
        pv  = (/0.5,-0.5,1.0,-1.0,1.5,-1.5,2.0,-2.0,(0.,k=kpv+1,komax)/)

        hyb_sigp    = .true.
        d3d_on      = .false.
        gocart_on   = .false.
        aqfcmaq_on  = .false.
        popascal    = .false.
        filenameaer = ''
        rdaod       = .false.
!       gocart_on = .true.
!       d3d_on    = .true.

!set control file name
        filenameflat = 'postxconfig-NT.txt'
        read(5,nampgb,iostat=iret,end=119)
  119   continue
        if (me==0) print*,'in itag, mod(num_procs,numx)=', mod(num_procs,numx)
        if(mod(num_procs,numx)/=0) then
          if (me==0) then
            print*,'total processes, num_procs=', num_procs
            print*,'number of subdomains in x direction, numx=', numx
            print*,'remainder of num_procs/numx = ', mod(num_procs,numx)
            print*,'Warning!!! The remainder of num_procs/numx is not 0; numx is reset to 1 &
                   & for this run, or you can adjust numx in the itag file and restart'
          endif
!         stop 9999
          numx = 1
          if(me == 0) print*,'Warning!!! Reset numx to 1, numx=',numx
        endif
        if(numx > num_procs/2) then
          if (me==0) then
            print*,'total processes, num_procs=', num_procs
            print*,'number of subdomains in x direction, numx=', numx
            print*,'Warning!!! numx cannot exceed num_procs/2; numx is reset to 1 for this run,'
            print*,'or you can adjust numx in the itag file and restart'
          endif
          numx = 1
          if(me == 0) print*,'Warning!!! Reset numx to 1, numx=',numx
        endif
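! Sketch (illustrative; the task layout shown is an assumption): with numx
! subdomains in x, the compute tasks form a numx by numy grid,
!
!      numy = num_procs / numx          ! tasks in the y direction
!      ! task me would own tile ( mod(me,numx), me/numx )
!
! which is why num_procs must divide evenly by numx, and numx may not
! exceed num_procs/2.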
        if(me == 0) then
          print*,'komax,iret for nampgb= ',komax,iret
          print*,'komax,kpo,kth,th,kpv,pv,fileNameAER,popascal= ',komax,kpo &
                 ,kth,th(1:kth),kpv,pv(1:kpv),trim(filenameaer),popascal
          print*,'NUM_PROCS=',num_procs
          print*,'numx= ',numx
        endif

        IF(trim(ioform) /= 'netcdfpara' .AND. trim(ioform) /= 'netcdf' ) THEN
          numx = 1
          if(me == 0) print*,'2D decomposition only supports netcdfpara IO.'
          if(me == 0) print*,'Reset numx= ',numx
        ENDIF

        IF(modelname /= 'FV3R' .AND. modelname /= 'GFS') THEN
          numx = 1
          if(me == 0) print*,'2D decomposition only supports GFS and FV3R.'
          if(me == 0) print*,'Reset numx= ',numx
        ENDIF

! set up pressure levels from POSTGPVARS or DEFAULT
        if(kpo == 0) then
! use default pressure levels
          if(me == 0) then
            print*,'using default pressure levels, spldef=',(spldef(l),l=1,lsmdef)
          endif
          lsm = lsmdef
          do l=1,lsm
            spl(l) = spldef(l)
          end do
        else
! use POSTGPVARS
          if(me == 0) then
            print*,'using pressure levels from POSTGPVARS'
          endif
          lsm = kpo
          if( .not. popascal ) then
            untcnvt = 100.
          else
            untcnvt = 1.
          endif
          if(po(lsm) < po(1))then ! post logic assumes ascending
            do l=1,lsm
              spl(l) = po(lsm-l+1)*untcnvt
            end do
          else
            do l=1,lsm
              spl(l) = po(l)*untcnvt
            end do
          end if
        end if
        lsmp1 = lsm+1
        if (me==0) print*,'LSM, SPL = ',lsm,spl(1:lsm)
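! Worked example (illustrative): po = 1000., 850., 500. hPa (descending)
! with popascal=.false. gives untcnvt=100., and the reordering above
! produces spl = 50000., 85000., 100000. Pa (ascending).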

  116   continue

! set PTHRESH for different models
        if(modelname == 'NMM')then
          pthresh = 0.000004
        else
          pthresh = 0.000001
        end if
!Chuang: add dynamic allocation
        if(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
            call ext_ncd_ioinit(sysdepinfo,status)
            print*,'called ioinit', status
            call ext_ncd_open_for_read( trim(filename), 0, 0, " ", &
                                        datahandle, status)
            print*,'called open for read', status
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status ; stop
            endif
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'WEST-EAST_GRID_DIMENSION',iim,1,ioutcount, status )
            im = iim-1
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SOUTH-NORTH_GRID_DIMENSION',jjm,1,ioutcount, status )
            jm = jjm-1
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'BOTTOM-TOP_GRID_DIMENSION',llm,1,ioutcount, status )
            lm    = llm-1
            lp1   = lm+1
            lm1   = lm-1
            im_jm = im*jm

            print*,'im jm lm from wrfout= ',im,jm, lm

! Read and set global value for surface physics scheme
            call ext_ncd_get_dom_ti_integer(datahandle &
                 ,'SF_SURFACE_PHYSICS',itmp,1,ioutcount, status )
            isf_surface_physics = itmp
            print*,'SF_SURFACE_PHYSICS= ',isf_surface_physics
! set NSOIL to 4 as default for NOAH but change if using other
! SFC scheme
            nsoil = 4
            IF(itmp == 1) then      ! thermal diffusion scheme
              nsoil = 5
            ELSE IF(itmp == 3) then ! RUC LSM
              nsoil = 9
            ELSE IF(itmp == 7) then ! Pleim Xu
              nsoil = 2
            END IF
            print*,'NSOIL from wrfout= ',nsoil

            call ext_ncd_ioclose( datahandle, status )
          ELSE
! use parallel netcdf lib directly to read FV3 output in netCDF
            spval = 9.99e20
            status = nf90_open(trim(filename),ior(nf90_nowrite,nf90_mpiio), &
                               ncid3d,comm=mpi_comm_world,info=mpi_info_null)
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status
              stop
            endif
            status = nf90_open(trim(filenameflux),ior(nf90_nowrite,nf90_mpiio), &
                               ncid2d,comm=mpi_comm_world,info=mpi_info_null)
            if ( status /= 0 ) then
              print*,'error opening ',filenameflux, ' Status = ', status
              stop
            endif
! read in LSM index and nsoil here
            status = nf90_get_att(ncid2d,nf90_global,'landsfcmdl', isf_surface_physics)
            if(status/=0)then
              print*,'landsfcmdl not found; assigning to 2'
              isf_surface_physics = 2   ! set LSM physics to 2 for NOAH
            endif
            if(isf_surface_physics<2)then
              isf_surface_physics = 2   ! set LSM physics to 2 for NOAH
            endif
            status = nf90_get_att(ncid2d,nf90_global,'nsoil', nsoil)
            if(status/=0)then
              print*,'nsoil not found; assigning to 4'
              nsoil = 4                 ! set nsoil to 4 for NOAH
            endif
            if(me==0)print*,'SF_SURFACE_PHYSICS= ',isf_surface_physics
            if(me==0)print*,'NSOIL= ',nsoil
! read imp_physics
            status = nf90_get_att(ncid2d,nf90_global,'imp_physics',imp_physics)
            if(status/=0)then
              print*,'imp_physics not found; assigning to GFDL 11'
              imp_physics = 11
            endif
            if (me == 0) print*,'MP_PHYSICS= ',imp_physics
! get dimensions
            status = nf90_inq_dimid(ncid3d,'grid_xt',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=im)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
            status = nf90_inq_dimid(ncid3d,'grid_yt',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=jm)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
            status = nf90_inq_dimid(ncid3d,'pfull',varid)
            if ( status /= 0 ) then
              print*,status,varid
              stop 1
            end if
            status = nf90_inquire_dimension(ncid3d,varid,len=lm)
            if ( status /= 0 ) then
              print*,status
              stop 1
            end if
            lp1   = lm+1
            lm1   = lm-1
            im_jm = im*jm
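! The three dimension inquiries above share one pattern; a hypothetical
! helper (sketch only, not in the source) would read
!
!      subroutine get_dim(ncid, name, dimlen)
!        use netcdf
!        integer,          intent(in)  :: ncid
!        character(len=*), intent(in)  :: name
!        integer,          intent(out) :: dimlen
!        integer :: varid, status
!        status = nf90_inq_dimid(ncid, name, varid)
!        if (status == 0) status = nf90_inquire_dimension(ncid, varid, len=dimlen)
!        if (status /= 0) stop 1
!      end subroutine get_dim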
! set NSOIL to 4 as default for NOAH but change if using other
! SFC scheme
!           NSOIL = 4

            print*,'im jm lm nsoil from fv3 output = ',im,jm,lm,nsoil
          END IF

        ELSE IF(trim(ioform) == 'binary' .OR. &
                trim(ioform) == 'binarympiio' ) THEN
          print*,'WRF Binary format is no longer supported'
          stop 9996
! NEMSIO format
        ELSE IF(trim(ioform) == 'binarynemsio' .or. &
                trim(ioform) == 'binarynemsiompiio' )THEN

          spval = 9.99e20
          IF(me == 0)THEN
            call nemsio_init(iret=status)
            print *,'nemsio_init, iret=',status
            call nemsio_open(nfile,trim(filename),'read',iret=status)
            if ( status /= 0 ) then
              print*,'error opening ',filename, ' Status = ', status ; stop
            endif
!---
            call nemsio_getfilehead(nfile,iret=status,nrec=nrec &
                 ,dimx=im,dimy=jm,dimz=lm,nsoil=nsoil)
            if ( status /= 0 ) then
              print*,'error finding model dimensions '; stop
            endif
            call nemsio_getheadvar(nfile,'global',global,iret)
            if (iret /= 0)then
              print*,"global not found in file - assigned false"
              global = .false.
            end if
            IF(modelname == 'GFS') global = .true.
! global NMMB has i=1 overlap with i=im so post will cut off i=im
            if(global .and. modelname == 'NMM') im = im-1

          END IF

          CALL mpi_bcast(im,   1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(jm,   1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(lm,   1,mpi_integer,0, mpi_comm_comp,status)
          call mpi_bcast(nsoil,1,mpi_integer,0, mpi_comm_comp,status)

          if (me == 0) print*,'im jm lm nsoil from NEMS= ',im,jm, lm ,nsoil
          call mpi_bcast(global,1,mpi_logical,0,mpi_comm_comp,status)
          if (me == 0) print*,'Is this a global run ',global
          lp1   = lm+1
          lm1   = lm-1
          im_jm = im*jm

! opening GFS flux file
          IF(modelname == 'GFS') THEN
!           iunit=33
            call nemsio_open(ffile,trim(filenameflux),'read',iret=iostatusflux)
            if ( iostatusflux /= 0 ) then
              print*,'error opening ',filenameflux, ' Status = ', iostatusflux
            endif
            iostatusd3d = -1
            iunitd3d    = -1
!
! opening GFS aer file
            call nemsio_open(rfile,trim(filenameaer),'read',iret=iostatusaer)
            if ( iostatusaer /= 0 .and. me == 0) then
              print*,'error opening AER ',filenameaer, ' Status = ', iostatusaer
            endif
!
!           print*,'iostatusD3D in WRFPOST= ',iostatusD3D

          END IF

        ELSE
          print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
          stop 9999
        END IF


        CALL mpi_first()
        print*,'jsta,jend,jsta_m,jend_m,jsta_2l,jend_2u,spval=',jsta, &
               jend,jsta_m,jend_m, jsta_2l,jend_2u,spval
        CALL allocate_all()

!
!       INITIALIZE POST COMMON BLOCKS
!
        lcntrl = 14
        rewind(lcntrl)

! EXP. initialize netcdf here instead
        bbtim = mpi_wtime()
        btim  = mpi_wtime()
! set default novegtype
        if(modelname == 'GFS')THEN
          novegtype = 13
          ivegsrc   = 2
        else if(modelname=='NMM' .and. trim(ioform)=='binarynemsio')then
          novegtype = 20
          ivegsrc   = 1
        else if(modelname=='RAPR')then
          novegtype = 20
          ivegsrc   = 1
        else ! USGS
          novegtype = 24
          ivegsrc   = 0
        end if

! Reading model output for different models and IO format

        IF(trim(ioform) == 'netcdf' .OR. trim(ioform) == 'netcdfpara') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR') THEN
            print*,'CALLING INITPOST TO PROCESS NCAR NETCDF OUTPUT'
            CALL initpost
          ELSE IF (modelname == 'FV3R' .OR. modelname == 'GFS') THEN
! use parallel netcdf library to read output directly
            print*,'CALLING INITPOST_NETCDF'
            CALL initpost_netcdf(ncid2d,ncid3d)
          ELSE
            print*,'POST does not have a netcdf option for model ',modelname,', STOPPING'
            stop 9998
          END IF
        ELSE IF(trim(ioform) == 'binarympiio') THEN
          IF(modelname == 'NCAR' .OR. modelname == 'RAPR' .OR. modelname == 'NMM') THEN
            print*,'WRF BINARY IO FORMAT IS NO LONGER SUPPORTED, STOPPING'
            stop 9996
          ELSE IF(modelname == 'RSM') THEN
            print*,'MPI BINARY IO IS NOT YET INSTALLED FOR RSM, STOPPING'
            stop 9997
          ELSE
            print*,'POST does not have an mpiio option for this model, STOPPING'
            stop 9998
          END IF
        ELSE IF(trim(ioform) == 'binarynemsio') THEN
          IF(modelname == 'NMM') THEN
            CALL initpost_nems(nrec,nfile)
          ELSE
            print*,'POST does not have a nemsio option for model ',modelname,', STOPPING'
            stop 9998

          END IF

        ELSE IF(trim(ioform) == 'binarynemsiompiio')THEN
          IF(modelname == 'GFS') THEN
! close nemsio file for serial read
            call nemsio_close(nfile,iret=status)
            call nemsio_close(ffile,iret=status)
            call nemsio_close(rfile,iret=status)
            CALL initpost_gfs_nems_mpiio(iostatusaer)
          ELSE
            print*,'POST does not have a nemsio mpi option for model ',modelname, &
                   ', STOPPING'
            stop 9999

          END IF

        ELSE
          print*,'UNKNOWN MODEL OUTPUT FORMAT, STOPPING'
          stop 9999
        END IF
        initpost_tim = initpost_tim + (mpi_wtime() - btim)
        IF(me == 0)THEN
          WRITE(6,*)'WRFPOST: INITIALIZED POST COMMON BLOCKS'
        ENDIF
!
! IF GRIB2, read the post available-fields xml file and the post control file
!
        if(grib == "grib2") then
          btim = mpi_wtime()
          call read_xml()
          readxml_tim = readxml_tim + (mpi_wtime() - btim)
        endif
!
!     LOOP OVER THE OUTPUT GRID(S). FIELD(S) AND OUTPUT GRID(S) ARE SPECIFIED
!     IN THE CONTROL FILE. WE PROCESS ONE GRID AND ITS FIELDS AT A TIME.
!     THAT'S WHAT THIS LOOP DOES.
!
        icount_calmict = 0
        first_grbtbl   = .true.
        npset          = 0
!10   CONTINUE
!
!     READ CONTROL FILE DIRECTING WHICH FIELDS ON WHICH
!     LEVELS AND TO WHICH GRID TO INTERPOLATE DATA TO.
!     VARIABLE IEOF/=0 WHEN THERE ARE NO MORE GRIDS TO PROCESS.
!
! -------- grib1 processing ---------------
! ------------------
!     if (grib == "grib1") then !DO NOT REVERT TO GRIB1. GRIB1 NOT SUPPORTED ANYMORE
!       IEOF = 0
!       do while (ieof == 0)
!         CALL READCNTRL(kth,IEOF)
!         IF(ME == 0)THEN
!           WRITE(6,*)'POST: RETURN FROM READCNTRL. ', 'IEOF=',IEOF
!         ENDIF
!
!     PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
!     WE GO THROUGH THE FOLLOWING STEPS:
!     (1) COMPUTE FIELD IF NEED BE
!     (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
!         if (ieof == 0) then
!           CALL PROCESS(kth,kpv,th(1:kth),pv(1:kpv),iostatusD3D)
!           IF(ME == 0)THEN
!             WRITE(6,*)' '
!             WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!           ENDIF
!         endif
!
!     PROCESS NEXT GRID.
!
!       enddo
! -------- grib2 processing ---------------
! ------------------
!     elseif (grib == "grib2") then
        if (me==0) write(0,*) ' in WRFPOST OUTFORM= ',grib
        if (me==0) write(0,*) ' GRIB1 IS NOT SUPPORTED ANYMORE'
        if (grib == "grib2") then
          do while (npset < num_pset)
            npset = npset+1
            if (me==0) write(0,*)' in WRFPOST npset=',npset,' num_pset=',num_pset
            CALL set_outflds(kth,th,kpv,pv)
            if (me==0) write(0,*)' in WRFPOST size datapd',size(datapd)
            if(allocated(datapd)) deallocate(datapd)
!Jesse x-decomposition
!           allocate(datapd(im,1:jend-jsta+1,nrecout+100))
            allocate(datapd(1:iend-ista+1,1:jend-jsta+1,nrecout+100))
!$omp parallel do private(i,j,k)
            do k=1,nrecout+100
              do j=1,jend+1-jsta
!Jesse x-decomposition
!               do i=1,im
                do i=1,iend+1-ista
                  datapd(i,j,k) = 0.
                enddo
              enddo
            enddo
            call get_postfilename(post_fname)
            if (me==0) write(0,*)'post_fname=',trim(post_fname)
            if (me==0) write(0,*)'get_postfilename,post_fname=',trim(post_fname), &
                                 'npset=',npset, 'num_pset=',num_pset, &
                                 'iSF_SURFACE_PHYSICS=',isf_surface_physics
!
!     PROCESS SELECTED FIELDS. FOR EACH SELECTED FIELD/LEVEL
!     WE GO THROUGH THE FOLLOWING STEPS:
!     (1) COMPUTE FIELD IF NEED BE
!     (2) WRITE FIELD TO OUTPUT FILE IN GRIB.
!
            CALL process(kth,kpv,th(1:kth),pv(1:kpv),iostatusd3d)
            IF(me == 0) WRITE(6,*)'WRFPOST: PREPARE TO PROCESS NEXT GRID'
!
!           write(0,*)'enter gribit2 before mpi_barrier'
            call mpi_barrier(mpi_comm_comp,ierr)

!           if(me==0)call w3tage('bf grb2 ')
!           write(0,*)'enter gribit2 after mpi barrier'
            call gribit2(post_fname)
            deallocate(datapd)
            deallocate(fld_info)
!
!     PROCESS NEXT GRID.
!
          enddo

        endif
!
!-------
        call grib_info_finalize()
!
        IF(me == 0) THEN
          WRITE(6,*)' '
          WRITE(6,*)'ALL GRIDS PROCESSED.'
          WRITE(6,*)' '
        ENDIF
!
        call de_allocate

!       GO TO 98
 1000   CONTINUE
!exp    call ext_ncd_ioclose ( DataHandle, Status )
!
        IF(me == 0) THEN
          print*, 'INITPOST_tim = ',      initpost_tim
          print*, 'MDLFLD_tim = ',        etafld2_tim
          print*, 'MDL2P_tim = ',         eta2p_tim
          print*, 'MDL2SIGMA_tim = ',     mdl2sigma_tim
          print*, 'MDL2AGL_tim = ',       mdl2agl_tim
          print*, 'SURFCE_tim = ',        surfce2_tim
          print*, 'CLDRAD_tim = ',        cldrad_tim
          print*, 'MISCLN_tim = ',        miscln_tim
          print*, 'MDL2STD_tim = ',       mdl2std_tim
          print*, 'FIXED_tim = ',         fixed_tim
          print*, 'MDL2THANDPV_tim = ',   mdl2thandpv_tim
          print*, 'CALRAD_WCLOUD_tim = ', calrad_wcloud_tim
          print*, 'Total time = ',        (mpi_wtime() - bbtim)
          print*, 'Time for OUTPUT = ',   time_output
          print*, 'Time for READxml = ',  readxml_tim
        endif
!
!     END OF PROGRAM.
!
!
!     MPI_LAST WILL SHUT DOWN THE IO SERVER, IF IT EXISTS
!
        CALL mpi_last
!
!
      end if
!
!
!
!     call summary()
      if (me == 0) CALL w3tage('UNIFIED_POST')
      CALL mpi_finalize(ierr)


      stop 0

      END