diff --git a/ChangeLog b/ChangeLog
index 92a34665c..ec9f145cc 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,65 @@
======================================================================
+Originator: Chris Fischer
+Date: 10-2-2018
+Tag: cime_cesm2_0_rel_06
+Answer Changes: None
+Tests: scripts_regression_tests
+Dependencies:
+
+Brief Summary:
+ - Archive_metadata updates for CMIP6 experiments.
+ - OS is not a generic env var.
+ - New machines at ETH and CSCS.
+ - Update ESMF library on cheyenne.
+ - Fix documentation for histaux_r2x namelist variable.
+ - Don't allow zero ntasks or nthrds.
+ - Add lapack and blas libraries to pgi compile on cheyenne.
+ - Updates for compatibility with XML-to-HTML tools.
+ - Remove an extra conversion to radians when creating ocean
+ domain files.
+ - ESMF logging fix.
+
+User interface changes:
+
+PR summary: git log --oneline --first-parent [previous_tag]..master
+12e1c4d41 (HEAD -> maint-5.6, origin/maint-5.6) Merge pull request #2815 from bertinia/maint-5.6_archive_metadata
+767511a79 OS is not generic env var
+bec49f0c2 Merge pull request #2807 from beyerle/euler_daint
+9e74b8ab3 Merge pull request #2806 from ESMCI/fischer/ESMF_710r
+0f20ab46e Merge pull request #2774 from billsacks/fix_histaux_r2x_docs
+1c10e58e8 Merge pull request #2768 from jedwards4b/dont_allow_zero_ntasks_nthrds
+6a1069ce3 Merge pull request #2753 from jedwards4b/pgi_lapack
+5a0a56721 Merge pull request #2742 from bertinia/maint-5.6-component-return-values
+cbf8f2216 Merge pull request #2736 from jtruesdal/domainfix
+2e34a0305 Merge pull request #2733 from gold2718/esmf_log_fix
+
+
+Modified files: git diff --name-status [previous_tag]
+M config/cesm/config_grids.xml
+M config/cesm/machines/config_batch.xml
+M config/cesm/machines/config_compilers.xml
+M config/cesm/machines/config_machines.xml
+M scripts/Tools/archive_metadata
+M scripts/lib/CIME/XML/component.py
+M scripts/lib/CIME/XML/compsets.py
+M scripts/lib/CIME/XML/env_mach_pes.py
+M scripts/lib/CIME/XML/machines.py
+M scripts/lib/CIME/utils.py
+M src/components/data_comps/docn/cime_config/config_component.xml
+M src/drivers/mct/cime_config/config_component.xml
+M src/drivers/mct/cime_config/namelist_definition_drv.xml
+M src/drivers/mct/main/cime_comp_mod.F90
+M src/drivers/mct/main/cime_driver.F90
+M src/drivers/mct/shr/seq_comm_mct.F90
+M src/drivers/mct/shr/seq_infodata_mod.F90
+M src/share/esmf_wrf_timemgr/ESMF_Stubs.F90
+M tools/mapping/gen_domain_files/src/gen_domain.F90
+
+======================================================================
+
+======================================================================
+
Originator: Chris Fischer
Date: 7-24-2018
Tag: cime_cesm2_0_rel_05
diff --git a/config/cesm/machines/config_batch.xml b/config/cesm/machines/config_batch.xml
index b29a11dab..2ed123441 100644
--- a/config/cesm/machines/config_batch.xml
+++ b/config/cesm/machines/config_batch.xml
@@ -96,7 +96,7 @@
-
+
bjobs
bsub
bkill
@@ -110,19 +110,12 @@
-u
-
-
-
-
-
+ -J {{ job_id }}
-n {{ total_tasks }}
- -R "span[ptile={{ tasks_per_node }}]"
- -N
- -a {{ poe }}
+ -W $JOB_WALLCLOCK_TIME
-o {{ job_id }}.%J
-e {{ job_id }}.%J
- -J {{ job_id }}
@@ -189,13 +182,36 @@
-
-
+
+
- -S {{ shell }}
+ -R "select[model==XeonE5_2680v3]"
+
+
+ normal.24h
+ normal.4h
+
+
+
+
+
+
+ -R "span[ptile=4] select[model==XeonE3_1585Lv5]"
- batch
+ normal.24h
+ normal.4h
+
+
+
+
+
+
+ -R "select[model==XeonGold_6150]"
+
+
+ normal.24h
+ normal.4h
@@ -212,13 +228,6 @@
-
-
-
- -S {{ shell }}
-
-
-
^(\d+)
@@ -523,7 +532,7 @@
-
+
sbatch
diff --git a/config/cesm/machines/config_compilers.xml b/config/cesm/machines/config_compilers.xml
index e600c79b7..42fc62b42 100644
--- a/config/cesm/machines/config_compilers.xml
+++ b/config/cesm/machines/config_compilers.xml
@@ -69,7 +69,6 @@ using a fortran linker.
compilers -->
- -DOS
-D_USE_FLOW_CONTROL
FALSE
@@ -486,6 +485,124 @@ using a fortran linker.
TRUE
+
+
+ -I/project/s824/edavin/OASIS3-MCT_2.0/build.pgi/build/lib/mct -I/project/s824/edavin/OASIS3-MCT_2.0/build.pgi/build/lib/psmile.MPI1
+
+
+ -llapack -lblas
+ -L/project/s824/edavin/OASIS3-MCT_2.0/build.pgi/lib -lpsmile.MPI1 -lscrip -lmct_oasis -lmpeu_oasis
+
+
+
+
+
+ -I/project/s824/edavin/OASIS3-MCT_2.0/build.cray/build/lib/mct -I/project/s824/edavin/OASIS3-MCT_2.0/build.cray/build/lib/psmile.MPI1
+
+
+ -L/project/s824/edavin/OASIS3-MCT_2.0/build.cray/lib -lpsmile.MPI1 -lscrip -lmct_oasis -lmpeu_oasis
+
+
+
+
+
+ -DLINUX
+
+ $ENV{NETCDF}
+ lustre
+ $ENV{PNETCDF}
+
+ -L$ENV{NETCDF}/lib -lnetcdf -lnetcdff
+
+
+
+
+
+ -xCORE-AVX2
+
+
+ -xCORE-AVX2
+
+
+ -mkl
+
+
+
+
+
+ -O2
+
+
+ -O2
+
+
+
+
+
+ -DLINUX
+
+ $ENV{NETCDF}
+ lustre
+ $ENV{PNETCDF}
+
+ -L$ENV{NETCDF}/lib -lnetcdf -lnetcdff
+
+
+
+
+
+ -xCORE-AVX2
+
+
+ -xCORE-AVX2
+
+
+ -mkl
+
+
+
+
+
+ -O2
+
+
+ -O2
+
+
+
+
+
+ -DLINUX
+
+ $ENV{NETCDF}
+ lustre
+ $ENV{PNETCDF}
+
+ -L$ENV{NETCDF}/lib -lnetcdf -lnetcdff
+
+
+
+
+
+ -xCORE-AVX2
+
+
+ -xCORE-AVX2
+
+
+ -mkl
+
+
+
+
+
+ -O2
+
+
+ -O2
+
+
+
-O2
diff --git a/config/cesm/machines/config_machines.xml b/config/cesm/machines/config_machines.xml
index 97ebcec0b..379ab739b 100644
--- a/config/cesm/machines/config_machines.xml
+++ b/config/cesm/machines/config_machines.xml
@@ -144,22 +144,22 @@
-
- Brutus Linux Cluster ETH (pgi(9.0-1)/intel(10.1.018) with openi(1.4.1)/mvapich2(1.4rc2), 16 pes/node, batch system LSF, added by UB
+
+ Euler II Linux Cluster ETH, 24 pes/node, InfiniBand, XeonE5_2680v3, batch system LSF
LINUX
- pgi,intel
+ intel,pgi
openmpi,mpich
- /cluster/work/uwis/$USER
- /cluster/work/uwis/ccsm/inputdata
- /cluster/work/uwis/ccsm/inputdata/atm/datm7
- /cluster/work/uwis/$USER/archive/$CASE
- /cluster/work/uwis/ccsm/ccsm_baselines
- /cluster/work/uwis/ccsm/tools/cprnc/cprnc
+ /cluster/work/climate/$USER
+ /cluster/work/climate/cesm/inputdata
+ /cluster/work/climate/cesm/inputdata/atm/datm7
+ /cluster/work/climate/$USER/archive/$CASE
+ /cluster/work/climate/cesm/ccsm_baselines
+ /cluster/work/climate/cesm/tools/cprnc/cprnc
1
lsf
- tcraig -at- ucar.edu
- 16
- 16
+ urs.beyerle -at- env.ethz.ch
+ 24
+ 24
mpirun
@@ -169,35 +169,166 @@
- ompirun
+ mpirun
- /etc/profile.d/modules.perl
+ /cluster/apps/modules/init/python.py
/etc/profile.d/modules.sh
/etc/profile.d/modules.csh
-
- /usr/bin/modulecmd perl
+ /cluster/apps/modules/bin/modulecmd python
+ module
+ module
+
+
+
+
+ new
+
+
+ intel/2018.1
+
+
+ netcdf/4.3.1
+
+
+ pgi/14.1
+
+
+ mvapich2/1.8.1
+
+
+ open_mpi/1.6.5
+
+
+
+ 64M
+
+
+
+
+ Euler III Linux Cluster ETH, 4 pes/node, Ethernet, XeonE3_1585Lv5, batch system LSF
+ LINUX
+ intel,pgi
+ openmpi,mpich
+ /cluster/work/climate/$USER
+ /cluster/work/climate/cesm/inputdata
+ /cluster/work/climate/cesm/inputdata/atm/datm7
+ /cluster/work/climate/$USER/archive/$CASE
+ /cluster/work/climate/cesm/ccsm_baselines
+ /cluster/work/climate/cesm/tools/cprnc/cprnc
+ 1
+ lsf
+ urs.beyerle -at- env.ethz.ch
+ 4
+ 4
+
+ mpirun
+
+ -hostfile $ENV{PBS_JOBID}
+ -ppn $MAX_MPITASKS_PER_NODE
+ -n {{ total_tasks }}
+
+
+
+ mpirun
+
+
+
+
+ /cluster/apps/modules/init/python.py
+ /etc/profile.d/modules.sh
+ /etc/profile.d/modules.csh
+ /cluster/apps/modules/bin/modulecmd python
module
module
+
+ new
+
+
+ interconnect/ethernet
+
- intel/10.1.018
+ intel/2018.1
+
+
+ netcdf/4.3.1
- pgi/9.0-1
+ pgi/14.1
- mvapich2/1.4rc2
+ mvapich2/1.8.1
- open_mpi/1.4.1
+ open_mpi/1.6.5
+
+
+
+ 64M
+
+
+
+
+ Euler IV Linux Cluster ETH, 36 pes/node, InfiniBand, XeonGold_6150, batch system LSF
+ LINUX
+ intel,pgi
+ openmpi,mpich
+ /cluster/work/climate/$USER
+ /cluster/work/climate/cesm/inputdata
+ /cluster/work/climate/cesm/inputdata/atm/datm7
+ /cluster/work/climate/$USER/archive/$CASE
+ /cluster/work/climate/cesm/ccsm_baselines
+ /cluster/work/climate/cesm/tools/cprnc/cprnc
+ 1
+ lsf
+ urs.beyerle -at- env.ethz.ch
+ 36
+ 36
+
+ mpirun
+
+ -hostfile $ENV{PBS_JOBID}
+ -ppn $MAX_MPITASKS_PER_NODE
+ -n {{ total_tasks }}
+
+
+
+ mpirun
+
+
+
+
+ /cluster/apps/modules/init/python.py
+ /etc/profile.d/modules.sh
+ /etc/profile.d/modules.csh
+ /cluster/apps/modules/bin/modulecmd python
+ module
+ module
+
+
- netcdf/4.0.1
+ new
+
+
+ intel/2018.1
+
+
+ netcdf/4.3.1
+
+
+ pgi/14.1
+
+
+ mvapich2/1.8.1
+
+
+ open_mpi/1.6.5
@@ -339,16 +470,16 @@
pgi/17.9
- esmf-7.0.0-defio-mpi-g
+ esmf-7.1.0r-defio-mpi-g
- esmf-7.0.0-defio-mpi-O
+ esmf-7.1.0r-defio-mpi-O
- esmf-7.0.0-ncdfio-uni-g
+ esmf-7.1.0r-ncdfio-uni-g
- esmf-7.0.0-ncdfio-uni-O
+ esmf-7.1.0r-ncdfio-uni-O
gnu/7.3.0
@@ -1625,27 +1756,26 @@
-
- CSCS Cray XE6, os is CNL, 32 pes/node, batch system is SLURM
+
+ CSCS Cray XC50, os is SUSE SLES, 12 pes/node, batch system is SLURM
CNL
pgi,cray,gnu
mpich
- /scratch/rosa/$USER
- /project/s433/cesm_inputdata
- /project/s433/cesm_inputdata/atm/datm7
- /project/s433/$USER/archive/$CASE
- /project/s433/ccsm_baselines
- /project/s433/cesm_tools/ccsm_cprnc/cprnc
+ /scratch/snx3000/$USER
+ /project/s824/cesm_inputdata
+ /project/s824/cesm_inputdata/atm/datm7
+ /project/s824/$USER/archive/$CASE
+ /project/s824/ccsm_baselines
+ /project/s824/cesm_tools/ccsm_cprnc/cprnc
12
slurm
edouard.davin -at- env.ethz.ch
- 32
- 32
+ 12
+ 12
- aprun
+ srun
-n {{ total_tasks }}
- -N $MAX_MPITASKS_PER_NODE
-d $ENV{OMP_NUM_THREADS}
@@ -1655,7 +1785,6 @@
-
Linux workstation at Sandia on SRN with SEMS TPL modules
(s999964|climate|penn)
diff --git a/scripts/Tools/archive_metadata b/scripts/Tools/archive_metadata
index 12d8d72b6..6fe2a292c 100755
--- a/scripts/Tools/archive_metadata
+++ b/scripts/Tools/archive_metadata
@@ -5,16 +5,8 @@ via a web post and SVN check-in
Author: CSEG
"""
-
-from standard_script_setup import *
-
-from CIME.case import Case
-from CIME.utils import expect, append_status, is_last_process_complete
-
-import base64
+import argparse
import datetime
-import errno
-import difflib
import filecmp
import getpass
import glob
@@ -23,107 +15,95 @@ import json
import io
from os.path import expanduser
import re
-import shlex
import shutil
import ssl
import subprocess
import sys
from string import Template
-import textwrap
-import urllib, urllib2
+import urllib
+import urllib2
+
+from standard_script_setup import *
+from CIME.case import Case
+from CIME.utils import is_last_process_complete
+
+# import handler for config file
if sys.version_info[0] == 2:
from ConfigParser import SafeConfigParser as config_parser
else:
from configparser import ConfigParser as config_parser
+# define global constants
logger = logging.getLogger(__name__)
-
-# define global variables
-_now = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
-
-_expdb_url = 'https://csegweb.cgd.ucar.edu/expdb2.0'
-_JSON_expdb_url = 'https://csegweb.cgd.ucar.edu/expdb2.0/cgi-bin/processJSON.cgi'
-_query_expdb_url = 'https://csegweb.cgd.ucar.edu/expdb2.0/cgi-bin/query.cgi'
-_SVN_expdb_url = 'https://svn-cesm2-expdb.cgd.ucar.edu'
-_exp_types = ['CMIP6','production','tuning']
-
-_XML_vars = ['CASE','COMPILER','COMPSET','CONTINUE_RUN','DOUT_L_MS',
- 'DOUT_L_MSROOT','DOUT_S','DOUT_S_ROOT','GRID',
- 'MACH','MPILIB','MODEL','MODEL_VERSION','REST_N','REST_OPTION',
- 'RUNDIR','RUN_REFCASE','RUN_REFDATE','RUN_STARTDATE',
- 'RUN_TYPE','STOP_N','STOP_OPTION','USER']
-
-_run_vars = ['JOB_QUEUE','JOB_WALLCLOCK_TIME','PROJECT']
-
-_archive_list = ['Buildconf','CaseDocs','CaseStatus','LockedFiles',
- 'Macros.make','README.case','SourceMods','software_environment.txt',
- 'timing','logs','postprocess']
-
+_svn_expdb_url = 'https://svn-cesm2-expdb.cgd.ucar.edu'
+_exp_types = ['CMIP6', 'production', 'tuning']
+_xml_vars = ['CASE', 'COMPILER', 'COMPSET', 'CONTINUE_RUN', 'DOUT_S', 'DOUT_S_ROOT',
+ 'GRID', 'MACH', 'MPILIB', 'MODEL', 'MODEL_VERSION', 'REST_N', 'REST_OPTION',
+ 'RUNDIR', 'RUN_REFCASE', 'RUN_REFDATE', 'RUN_STARTDATE', 'RUN_TYPE',
+ 'STOP_N', 'STOP_OPTION', 'USER']
+_run_vars = ['JOB_QUEUE', 'JOB_WALLCLOCK_TIME', 'PROJECT']
+_archive_list = ['Buildconf', 'CaseDocs', 'CaseStatus', 'LockedFiles',
+ 'Macros.make', 'README.case', 'SourceMods', 'software_environment.txt',
+ 'timing', 'logs', 'postprocess/logs']
_call_template = Template('in "$function" - Ignoring SVN repo update\n'
'SVN error executing command "$cmd". \n'
'$error: $strerror')
-
_copy_template = Template('in "$function" - Unable to copy "$source" to "$dest"'
'$error: $strerror')
+_svn_error_template = Template('in "$function" - SVN client unavailable\n'
+ 'SVN error executing command "$cmd". \n'
+ '$error: $strerror')
+_ignore_patterns = ['*.pyc', '^.git', 'tmp', '.svn', '*~']
+_pp_xml_vars = {'atm' : 'ATMDIAG_OUTPUT_ROOT_PATH',
+ 'glc' : '',
+ 'lnd' : 'LNDDIAG_OUTPUT_ROOT_PATH',
+ 'ice' : 'ICEDIAG_PATH_CLIMO_CONT',
+ 'ocn' : 'OCNDIAG_TAVGDIR',
+ 'rof' : '',
+ 'timeseries' : 'TIMESERIES_OUTPUT_ROOTDIR',
+ 'xconform' : 'CONFORM_OUTPUT_DIR'}
+_pp_diag_vars = {'atm' : ['ATMDIAG_test_first_yr', 'ATMDIAG_test_nyrs'],
+ 'ice' : ['ICEDIAG_BEGYR_CONT', 'ICEDIAG_ENDYR_CONT', 'ICEDIAG_YRS_TO_AVG'],
+ 'lnd' : ['LNDDIAG_clim_first_yr_1', 'LNDDIAG_clim_num_yrs_1',
+ 'LNDDIAG_trends_first_yr_1', 'LNDDIAG_trends_num_yrs_1'],
+ 'ocn' : ['OCNDIAG_YEAR0', 'OCNDIAG_YEAR1',
+ 'OCNDIAG_TSERIES_YEAR0', 'OCNDIAG_TSERIES_YEAR1']}
+
+# setting the ssl context to avoid issues with CGD certificates
+_context = ssl._create_unverified_context() # pylint:disable=protected-access
-# file patterns to ignore when checking into SVN
-_IGNORE_PATTERNS = ['*.pyc','^.git','tmp','.svn','*~']
-
-# file patterns to ignore when doing comparisons for SVN updates
-# most of these are symbolic links back to CIME source tree
-_EXCLUDE_FILES = ['archive_metadata','case.build',
- 'case.cmpgen_namelists','case.lt_archive','.case.run',
- 'case.setup','case.st_archive','case.submit','check_case',
- 'check_input_data','preview_namelists','xmlchange','xmlquery',
- '.env_mach_specific.csh','.env_mach_specific.sh',
- 'archive_files', 'Makefile', 'check_lockedfiles', 'getTiming',
- 'lt_archive.sh', 'mkDepends', 'mkSrcfiles', 'save_provenance',
- 'README.archive','case.qstatus','pelayout','preview_run']
-_EXCLUDE_DIRS = ['archive_files','Tools','.svn','.git','tmp']
-
-# get the path settings for postprocessing
-_pp_xml_vars = { 'atm' : 'ATMDIAG_OUTPUT_ROOT_PATH',
- 'glc' : '',
- 'lnd' : 'LNDDIAG_OUPTUT_ROOT_PATH',
- 'ice' : 'ICEDIAG_PATH_CLIMO_CONT',
- 'ocn' : 'OCNDIAG_TAVGDIR',
- 'rof' : '',
- 'timeseries' : 'TIMESERIES_OUTPUT_ROOTDIR',
- 'xconform' : 'CONFORM_OUTPUT_DIR' }
# -------------------------------------------------------------------------------
class PasswordPromptAction(argparse.Action):
# -------------------------------------------------------------------------------
+ """ SVN developer's password class handler
+ """
+ # pylint: disable=redefined-builtin
def __init__(self,
- option_strings,
- dest=None,
- nargs=0,
- default=None,
- required=False,
- type=None,
- metavar=None,
- help=None):
-
+ option_strings=None,
+ dest=None,
+ default=None,
+ required=False,
+ nargs=0,
+ help=None):
super(PasswordPromptAction, self).__init__(
- option_strings=option_strings,
- dest=dest,
- nargs=nargs,
- default=default,
- required=required,
- metavar=metavar,
- type=type,
- help=help)
+ option_strings=option_strings,
+ dest=dest,
+ default=default,
+ required=required,
+ nargs=nargs,
+ help=help)
def __call__(self, parser, args, values, option_string=None):
- # check if home .subversion/cmip6.conf
+ # check if ~/.subversion/cmip6.conf exists
home = expanduser("~")
conf_path = os.path.join(home, ".subversion/cmip6.conf")
if os.path.exists(conf_path):
# read the .cmip6.conf file
config = config_parser()
config.read(conf_path)
- password = config.get('svn','password')
+ password = config.get('svn', 'password')
else:
password = getpass.getpass()
setattr(args, self.dest, password)
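
For reference, the ~/.subversion/cmip6.conf file read above is a plain INI file parsed with ConfigParser; a minimal sketch with a hypothetical value would be:

    [svn]
    password = not-a-real-password
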
@@ -131,13 +111,18 @@ class PasswordPromptAction(argparse.Action):
# ---------------------------------------------------------------------
def basic_authorization(user, password):
# ---------------------------------------------------------------------
- s = user + ":" + password
- return "Basic " + s.encode("base64").rstrip()
+ """ Basic authentication encoding
+ """
+ sauth = user + ":" + password
+ return "Basic " + sauth.encode("base64").rstrip()
# ---------------------------------------------------------------------
class SVNException(Exception):
# ---------------------------------------------------------------------
+ """ SVN command exception handler
+ """
def __init__(self, value):
+ super(SVNException, self).__init__(value)
self.value = value
def __str__(self):
@@ -159,57 +144,73 @@ def commandline_options(args):
CIME.utils.setup_standard_logging_options(parser)
parser.add_argument('--user', dest='user', type=str, default=None, required=True,
- help='User name for SVN CESM developer access')
+ help='User name for SVN CESM developer access (required)')
- parser.add_argument('--password', dest='password', action=PasswordPromptAction, type=str, default='', required=True,
- help='Password for SVN CESM developer access')
+ parser.add_argument('--password', dest='password', action=PasswordPromptAction,
+ default='', required=True,
+ help='Password for SVN CESM developer access (required)')
- parser.add_argument('--caseroot', nargs=1, required=False,
+ parser.add_argument('--caseroot', nargs=1, required=False,
help='Fully qualified path to case root directory (optional). ' \
- 'Defaults to current working directory.')
-
- parser.add_argument('--workdir', nargs=1, required=False,
- help='Fully quailfied path to directory for storing intermediate '\
- 'case files. A sub-directory called '\
- 'temp_archive_dir_YYYYmmdd_hhmm is created, populated '\
- 'with case files, and posted to the CESM experiments database. '\
- 'This argument can be used to archive a caseroot when the user ' \
- 'does not have write permission in the caseroot (optional). ' \
- 'Defaults to current working directory.')
-
- parser.add_argument('--expType', nargs=1, required=True, choices=_exp_types,
+ 'Defaults to current working directory.')
+
+ parser.add_argument('--workdir', nargs=1, required=False,
+ help='Fully qualified path to directory for storing intermediate ' \
+ 'case files. A sub-directory called ' \
+ 'archive_temp_dir is created, populated ' \
+ 'with case files, and posted to the CESM experiments database and ' \
+ 'SVN repository at URL "{0}". ' \
+ 'This argument can be used to archive a caseroot when the user ' \
+ 'does not have write permission in the caseroot (optional). ' \
+ 'Defaults to current working directory.'.format(_svn_expdb_url))
+
+ parser.add_argument('--expType', dest='expType', nargs=1, required=True, choices=_exp_types,
help='Experiment type. For CMIP6 experiments, the case must already ' \
- 'exist in the experiments database at URL '\
- ' http://csegweb.cgd.ucar.edu/expdb2.0. ' \
- 'Must be one of {0}'.format(_exp_types))
+ 'exist in the experiments database at URL ' \
+ ' "http://csegweb.cgd.ucar.edu/expdb2.0" (required). ' \
+ 'Must be one of "{0}"'.format(_exp_types))
parser.add_argument('--title', nargs=1, required=False, default=None,
help='Title of experiment (optional).')
- parser.add_argument('--ignoreLogs', action='store_true',
+ parser.add_argument('--ignore-logs', dest='ignore_logs', action='store_true',
help='Ignore updating the SVN repository with the caseroot/logs files. ' \
- 'The experiments database and SVN repository will be updated.')
+ 'The experiments database will be updated (optional).')
- parser.add_argument('--ignoreTiming', action='store_true',
- help='Ignore updating the experiments database with timing data.')
+ parser.add_argument('--ignore-timing', dest='ignore_timing', action='store_true',
+ help='Ignore updating the SVN repository with caseroot/timing files. ' \
+ 'The experiments database will be updated (optional).')
- parser.add_argument('--ignoreRepoUpdate', action='store_true',
+ parser.add_argument('--ignore-repo-update', dest='ignore_repo_update', action='store_true',
help='Ignore updating the SVN repository with all the caseroot files. ' \
- 'The experiments database will be updated.')
+ 'The experiments database will be updated (optional).')
+
+ parser.add_argument('--add-files', dest='user_add_files', required=False,
+ help='Comma-separated list with no spaces of files or directories to be ' \
+ 'added to the SVN repository. These are in addition to the default added ' \
+ 'caseroot files and directories: '\
+ '"{0}, *.xml, user_nl_*" (optional).'.format(_archive_list))
parser.add_argument('--dryrun', action='store_true',
help='Parse settings and print what actions will be taken but ' \
- 'do not execute the action.')
+ 'do not execute the action (optional).')
- parser.add_argument('--query_cmip6', nargs=2, required=False,
+ parser.add_argument('--query_cmip6', nargs=2, required=False,
help='Query the experiments database global attributes ' \
- 'for specified CMIP6 casename as argument 1. ' \
- 'Writes a json formatted output file, specified by argument 2, ' \
- 'to subdir archive_files (optional).')
+ 'for specified CMIP6 casename as argument 1. ' \
+ 'Writes a json formatted output file, specified by argument 2, ' \
+ 'to subdir archive_files (optional).')
- options = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)
+ parser.add_argument('--test-post', dest='test_post', action='store_true',
+ help='Post metadata to the test expdb2.0 web application server ' \
+ 'at URL "http://csegwebdev.cgd.ucar.edu/expdb2.0". ' \
+ 'No --test-post argument defaults to posting metadata to the ' \
+ 'production expdb2.0 web application server '\
+ 'at URL "http://csegweb.cgd.ucar.edu/expdb2.0" (optional).')
- return options
+ opts = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser)
+
+ return opts
# ---------------------------------------------------------------------
def get_case_vars(case_dict, case):
@@ -224,7 +225,7 @@ def get_case_vars(case_dict, case):
"""
logger.debug('get_case_vars')
- for xml_id in _XML_vars:
+ for xml_id in _xml_vars:
case_dict[xml_id] = case.get_value(xml_id, resolved=True, subgroup=None)
for xml_id in _run_vars:
@@ -233,27 +234,60 @@ def get_case_vars(case_dict, case):
return case_dict
# ---------------------------------------------------------------------
-def get_disk_size(start_path, exclude=None):
+def bytes_2_human(n):
+# ---------------------------------------------------------------------
+ """bytes to human
+ http://code.activestate.com/recipes/578019
+ >>> bytes_2_human(10000)
+ '9.8K'
+ >>> bytes_2_human(100001221)
+ '95.4M'
+
+ """
+ logger.debug('bytes_2_human')
+ symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+ prefix = {}
+ for i, s in enumerate(symbols):
+ prefix[s] = 1 << (i+1)*10
+ for s in reversed(symbols):
+ if n >= prefix[s]:
+ value = float(n) / prefix[s]
+ return '%.1f%s' % (value, s)
+
+ return "%sB" % n
+
+# ---------------------------------------------------------------------
+def get_disk_usage(path):
# ---------------------------------------------------------------------
- """get_disk_size
- return the total disk usage for a given path ignoring symlinks.
+ """get_disk_usage
+ return the total disk usage for a given path.
Arguments:
- start_path - path to start
- exclude - a directory to exclude - should this be a list?
+ path - path to start
"""
- logger.debug('get_disk_size')
+ logger.debug('get_disk_usage')
total_size = 0
- for dirpath, dirnames, filenames in os.walk(start_path):
- if exclude and exclude in dirnames:
- continue
- for f in filenames:
- fp = os.path.join(dirpath, f)
- if os.path.islink(fp):
- continue
- total_size += os.path.getsize(fp)
-
- return total_size
+ raw_size = 0
+ cwd = os.getcwd()
+ if os.path.exists(path):
+ os.chdir(path)
+ cmd = ['du', '--summarize', '--human-readable']
+ try:
+ total_size = subprocess.check_output(cmd)
+ total_size = total_size.replace('\t.\n', '')
+ except subprocess.CalledProcessError:
+ msg = "Error executing command = '{0}'".format(cmd)
+ logger.warning(msg)
+ cmd = ['du', '--summarize']
+ try:
+ raw_size = subprocess.check_output(cmd)
+ raw_size = int(raw_size.replace('\t.\n', ''))
+ except subprocess.CalledProcessError:
+ msg = "Error executing command = '{0}'".format(cmd)
+ logger.warning(msg)
+
+ os.chdir(cwd)
+ return raw_size, total_size
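
get_disk_usage above shells out to GNU du and returns both a raw byte count and a human-readable string. If du were unavailable, a minimal pure-Python fallback (a sketch mirroring the removed get_disk_size and reusing the module's bytes_2_human) might look like:

    import os

    def get_disk_usage_fallback(path):
        # Sum file sizes under path, skipping symlinks, and return
        # (raw_bytes, human_readable) like get_disk_usage.
        total = 0
        for dirpath, _, filenames in os.walk(path):
            for fname in filenames:
                fpath = os.path.join(dirpath, fname)
                if not os.path.islink(fpath):
                    total += os.path.getsize(fpath)
        return total, bytes_2_human(total)
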
# ---------------------------------------------------------------------
def get_pp_path(pp_dir, comp, subdir):
@@ -262,7 +296,7 @@ def get_pp_path(pp_dir, comp, subdir):
return the path to postprocessing
Arguments:
- root_path - path to start
+ pp_dir - path to the postprocess directory
comp - component name
subdir - postprocess subdir
"""
@@ -275,9 +309,9 @@ def get_pp_path(pp_dir, comp, subdir):
if comp == 'timeseries':
pp_path_var = _pp_xml_vars['timeseries']
subdir = ''
- elif subdir == 'timeseries' or comp in ['glc','rof']:
+ elif subdir == 'timeseries' or comp in ['glc', 'rof']:
pp_path_var = _pp_xml_vars['timeseries']
- subdir = 'tseries'
+ subdir = '{0}/proc/tseries'.format(comp)
elif comp == 'xconform':
pp_path_var = _pp_xml_vars['xconform']
elif comp == 'ocn' and subdir == 'diag':
@@ -288,19 +322,49 @@ def get_pp_path(pp_dir, comp, subdir):
cmd = ['./pp_config', '--get', pp_path_var, '--value']
try:
pp_path = subprocess.check_output(cmd)
- except subprocess.CalledProcessError as e:
+ except subprocess.CalledProcessError:
msg = "Error executing command = '{0}'".format(cmd)
logger.warning(msg)
# construct the final path based on comp and subdir
pp_path = pp_path.rstrip()
if len(pp_path) > 2:
- if comp not in ['timeseries', 'xconform']:
- pp_path = os.path.join(pp_path, comp, 'proc', subdir)
-
+ if (comp not in ['timeseries', 'xconform', 'ice', 'ocn']
+ or 'tseries' in subdir):
+ pp_path = os.path.join(pp_path, subdir)
+ elif comp == 'ice' and subdir == 'diag':
+ pp_path = pp_path.replace('climo', 'diag', 1)
os.chdir(cwd)
return pp_path
+# ---------------------------------------------------------------------
+def get_diag_dates(comp, pp_dir):
+# ---------------------------------------------------------------------
+ """ get_diag_dates
+
+ Query the postprocessing env_diags_[comp].xml file to get the model diag
+ dates for the given component.
+ """
+ logger.debug('get_diag_dates')
+
+ cwd = os.getcwd()
+ os.chdir(pp_dir)
+
+ model_dates = ''
+ pp_vars = _pp_diag_vars.get(comp)
+ for pp_var in pp_vars:
+ cmd = ['./pp_config', '--get', pp_var, '--value']
+ try:
+ pp_value = subprocess.check_output(cmd)
+ except subprocess.CalledProcessError:
+ msg = "Error executing command = '{0}'".format(cmd)
+ logger.warning(msg)
+ tmp_dates = '{0} = {1}'.format(pp_var, pp_value)
+ model_dates = model_dates + tmp_dates
+
+ os.chdir(cwd)
+ return model_dates
+
# ---------------------------------------------------------------------
def get_pp_status(case_dict):
# ---------------------------------------------------------------------
@@ -316,23 +380,27 @@ def get_pp_status(case_dict):
# initialize status variables
msg_avg = dict()
msg_diags = dict()
- diag_comps = ['atm','ice','lnd','ocn']
- tseries_comps = ['atm','glc','ice','lnd','ocn','rof']
+ diag_comps = ['atm', 'ice', 'lnd', 'ocn']
+ tseries_comps = ['atm', 'glc', 'ice', 'lnd', 'ocn', 'rof']
- pp_dir = os.path.join(case_dict['CASEROOT'],'postprocess')
- pp_log_dir = os.path.join(case_dict['CASEROOT'],'postprocess','logs')
+ pp_dir = os.path.join(case_dict['CASEROOT'], 'postprocess')
+ pp_log_dir = os.path.join(case_dict['CASEROOT'], 'postprocess', 'logs')
- msg_avg['atm'] = "Successfully completed generating atmosphere climatology averages"
+ msg_avg['atm'] = "COMPLETED SUCCESSFULLY"
msg_diags['atm'] = "Successfully completed generating atmosphere diagnostics"
+ case_dict['atm_avg_dates'] = case_dict['atm_diag_dates'] = get_diag_dates('atm', pp_dir)
msg_avg['ice'] = "Successfully completed generating ice climatology averages"
msg_diags['ice'] = "Successfully completed generating ice diagnostics"
+ case_dict['ice_avg_dates'] = case_dict['ice_diag_dates'] = get_diag_dates('ice', pp_dir)
- msg_avg['lnd'] = "Successfully completed generating land climatology averages"
+ msg_avg['lnd'] = "COMPLETED SUCCESSFULLY"
msg_diags['lnd'] = "Successfully completed generating land diagnostics"
+ case_dict['lnd_avg_dates'] = case_dict['lnd_diag_dates'] = get_diag_dates('lnd', pp_dir)
msg_avg['ocn'] = "Successfully completed generating ocean climatology averages"
msg_diags['ocn'] = "Successfully completed generating ocean diagnostics"
+ case_dict['ocn_avg_dates'] = case_dict['ocn_diag_dates'] = get_diag_dates('ocn', pp_dir)
for comp in diag_comps:
case_dict[comp+'_avg_status'] = 'Unknown'
@@ -345,14 +413,14 @@ def get_pp_status(case_dict):
case_dict[comp+'_diag_size'] = 0
avg_logs = list()
- avg_file_pattern = ("{0}/{1}_averages.log.*".format(pp_log_dir,comp))
+ avg_file_pattern = ("{0}/{1}_averages.log.*".format(pp_log_dir, comp))
avg_logs = glob.glob(avg_file_pattern)
- if (avg_logs):
+ if avg_logs:
log_file = max(avg_logs, key=os.path.getctime)
- if (is_last_process_complete(log_file, msg_avg[comp],
+ if (is_last_process_complete(log_file, msg_avg[comp],
'Average list complies with standards.')):
- case_dict[comp+'_avg_status'] = 'Complete'
+ case_dict[comp+'_avg_status'] = 'Succeeded'
else:
case_dict[comp+'_avg_status'] = 'Started'
@@ -360,16 +428,16 @@ def get_pp_status(case_dict):
case_dict[comp+'_avg_size'] = 0
case_dict[comp+'_avg_path'] = get_pp_path(pp_dir, comp, 'climo')
if case_dict[comp+'_avg_path'] is not None:
- case_dict[comp+'_avg_size'] = get_disk_size(case_dict[comp+'_avg_path'])
+ (raw_size, case_dict[comp+'_avg_size']) = get_disk_usage(case_dict[comp+'_avg_path'])
diag_logs = list()
- diag_file_pattern = ("{0}/{1}_diagnostics.log.*".format(pp_log_dir,comp))
+ diag_file_pattern = ("{0}/{1}_diagnostics.log.*".format(pp_log_dir, comp))
diag_logs = glob.glob(diag_file_pattern)
- if (diag_logs):
+ if diag_logs:
log_file = max(diag_logs, key=os.path.getctime)
- if (is_last_process_complete(log_file, msg_diags[comp], 'ncks version')):
- case_dict[comp+'_diag_status'] = 'Complete'
+ if is_last_process_complete(log_file, msg_diags[comp], 'ncks version'):
+ case_dict[comp+'_diag_status'] = 'Succeeded'
else:
case_dict[comp+'_diag_status'] = 'Started'
@@ -377,8 +445,7 @@ def get_pp_status(case_dict):
case_dict[comp+'_diag_size'] = 0
case_dict[comp+'_diag_path'] = get_pp_path(pp_dir, comp, 'diag')
if case_dict[comp+'_diag_path'] is not None:
- case_dict[comp+'_diag_size'] = get_disk_size(case_dict[comp+'_diag_path'])
-
+ (raw_size, case_dict[comp+'_diag_size']) = get_disk_usage(case_dict[comp+'_diag_path'])
# get timeseries status
case_dict['timeseries_status'] = 'Unknown'
@@ -387,18 +454,22 @@ def get_pp_status(case_dict):
tseries_logs = list()
tseries_file_pattern = ("{0}/timeseries.log.*".format(pp_log_dir))
tseries_logs = glob.glob(tseries_file_pattern)
- if (tseries_logs):
- log_file = max(tseries_logs, key=os.path.getctime)
- if (is_last_process_complete(log_file, 'Successfully completed generating variable time-series files',
- 'opening')):
- case_dict['timeseries_status'] = 'Complete'
- else:
- case_dict['timeseries_status'] = 'Started'
+ if tseries_logs:
+ log_file = max(tseries_logs, key=os.path.getctime)
+ if is_last_process_complete(filepath=log_file,
+ expect_text='Successfully completed',
+ fail_text='opening'):
+ case_dict['timeseries_status'] = 'Succeeded'
+ else:
+ case_dict['timeseries_status'] = 'Started'
+ tseries_size = 0
for comp in tseries_comps:
case_dict[comp+'_timeseries_path'] = get_pp_path(pp_dir, comp, 'timeseries')
- case_dict[comp+'_timeseries_size'] = get_disk_size(case_dict[comp+'_timeseries_path'])
+ (raw_size, case_dict[comp+'_timeseries_size']) = get_disk_usage(case_dict[comp+'_timeseries_path'])
+
case_dict[comp+'_timeseries_status'] = case_dict['timeseries_status']
- case_dict['timeseries_size'] = case_dict['timeseries_size'] + case_dict[comp+'_timeseries_size']
+ tseries_size += raw_size
+ case_dict['timeseries_size'] = bytes_2_human(tseries_size)
# get iconform status = this initializes files in the POSTPROCESS_PATH
case_dict['iconform_status'] = 'Unknown'
@@ -408,32 +479,74 @@ def get_pp_status(case_dict):
iconform_logs = list()
iconform_file_pattern = ("{0}/iconform.log.*".format(pp_log_dir))
iconform_logs = glob.glob(iconform_file_pattern)
- if (iconform_logs):
+ if iconform_logs:
log_file = max(iconform_logs, key=os.path.getctime)
- if (is_last_process_complete(log_file, 'Successfully created the conform tool',
- 'Running createOutputSpecs')):
- case_dict['iconform_status'] = 'Complete'
+ if (is_last_process_complete(log_file, 'Successfully created the conform tool',
+ 'Running createOutputSpecs')):
+ case_dict['iconform_status'] = 'Succeeded'
else:
case_dict['iconform_status'] = 'Started'
# get xconform status
case_dict['xconform_status'] = 'Unknown'
case_dict['xconform_path'] = get_pp_path(pp_dir, 'xconform', subdir=None)
- case_dict['xconform_size'] = get_disk_size(case_dict['xconform_path'])
-
+ (xconform_size, case_dict['xconform_size']) = get_disk_usage(case_dict['xconform_path'])
+ case_dict['xconform_size'] = bytes_2_human(xconform_size)
xconform_logs = list()
xconform_file_pattern = ("{0}/xconform.log.*".format(pp_log_dir))
xconform_logs = glob.glob(xconform_file_pattern)
- if (xconform_logs):
+ if xconform_logs:
log_file = max(xconform_logs, key=os.path.getctime)
- if (is_last_process_complete(log_file, 'Successfully completed converting all files',
- 'cesm_conform_generator INFO')):
- case_dict['xconform_status'] = 'Complete'
+ if (is_last_process_complete(log_file,
+ 'Successfully completed converting all files',
+ 'cesm_conform_generator INFO')):
+ case_dict['xconform_status'] = 'Succeeded'
else:
case_dict['xconform_status'] = 'Started'
return case_dict
+# ---------------------------------------------------------------------
+def get_run_last_date(casename, run_path):
+# ---------------------------------------------------------------------
+ """ get_run_last_date
+ parse the last cpl.r file in the run_path to retrieve the last date.
+
+ Arguments:
+ casename
+ run_path - path to run directory
+ """
+ logger.debug('get_run_last_date')
+
+ pattern = ('{0}.cpl.r.*.nc'.format(casename))
+ cpl_files = sorted(glob.glob(os.path.join(run_path, pattern)))
+
+ if cpl_files:
+ _, cpl_file = os.path.split(cpl_files[-1])
+ fparts = cpl_file.split('.')
+ return fparts[-2]
+
+ return '0000-00-00'
+
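
As a worked example of the date extraction above, with a hypothetical casename:

    fname = 'b.e21.B1850.f09_g17.cpl.r.0451-01-01-00000.nc'
    print(fname.split('.')[-2])   # -> 0451-01-01-00000
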
+# ---------------------------------------------------------------------
+def get_sta_last_date(sta_path):
+# ---------------------------------------------------------------------
+ """ get_sta_last_date
+ parse the last rest directory in the sta_path to retrieve the last date.
+
+ Arguments:
+ sta_path - path to the short-term archive directory
+ """
+ logger.debug('get_sta_last_date')
+
+ rest_dirs = sorted(glob.glob(os.path.join(sta_path, 'rest/*')))
+
+ if rest_dirs:
+ _, rest_dir = os.path.split(rest_dirs[-1])
+ return rest_dir
+
+ return '0000-00-00'
+
# ---------------------------------------------------------------------
def get_case_status(case_dict):
# ---------------------------------------------------------------------
@@ -449,29 +562,39 @@ def get_case_status(case_dict):
# initialize status variables
case_dict['run_status'] = 'Unknown'
case_dict['run_path'] = case_dict['RUNDIR']
- case_dict['run_size'] = get_disk_size(case_dict['run_path'])
-
+ case_dict['run_size'] = 0
+ case_dict['run_last_date'] = case_dict['RUN_STARTDATE']
case_dict['sta_status'] = 'Unknown'
case_dict['sta_path'] = case_dict['DOUT_S_ROOT']
case_dict['sta_size'] = 0
- if case_dict['DOUT_S'] == True:
- # get only the history, rest and logs dir - ignoring the proc subdirs
- case_dict['sta_size'] = get_disk_size(case_dict['sta_path'], exclude='proc')
+ case_dict['sta_last_date'] = case_dict['RUN_STARTDATE']
- cs = case_dict['CASEROOT']+'/CaseStatus'
- if os.path.exists(cs):
+ cstatus = case_dict['CASEROOT']+'/CaseStatus'
+ if os.path.exists(cstatus):
# get the run status
- status = is_last_process_complete(cs, "case.run success", "case.run starting")
- case_dict['run_status'] = 'Succeeded'
- if status is False:
- case_dict['run_status'] = 'Unknown'
+ run_status = is_last_process_complete(cstatus, "case.run success", "case.run starting")
+ if run_status is True:
+ case_dict['run_status'] = 'Succeeded'
+ (raw_disk, case_dict['run_size']) = get_disk_usage(case_dict['run_path'])
+ case_dict['run_last_date'] = get_run_last_date(case_dict['CASE'], case_dict['run_path'])
# get the STA status
if case_dict['DOUT_S']:
- status = is_last_process_complete(cs, "st_archive success", "st_archive starting")
- case_dict['sta_status'] = 'Succeeded'
- if status is False:
- case_dict['sta_status'] = 'Unknown'
+ # get only the history, rest and logs dir - ignoring the proc subdirs
+ sta_status = is_last_process_complete(cstatus, "st_archive success",
+ "st_archive starting")
+ if sta_status is True:
+ case_dict['sta_status'] = 'Succeeded'
+ # exclude the proc directories in the sta size estimates
+ case_dict['sta_last_date'] = get_sta_last_date(case_dict['DOUT_S_ROOT'])
+ sta_size = 0
+ for subdir in ['atm/hist', 'esp/hist', 'ice/hist', 'glc/hist',
+ 'lnd/hist', 'logs', 'ocn/hist', 'rest', 'rof/hist',
+ 'wav/hist']:
+ path = os.path.join(case_dict['sta_path'], subdir)
+ (raw_disk, _) = get_disk_usage(path)
+ sta_size += raw_disk
+ case_dict['sta_size'] = bytes_2_human(sta_size)
# check if the postprocess dir exists in the caseroot
case_dict['postprocess'] = False
@@ -497,19 +620,22 @@ def check_expdb_case(case_dict, username, password):
"""
logger.debug('check_expdb_case')
exists = False
- data_dict = {'casename':case_dict['CASE'], 'queryType':'checkCaseExists', 'expType':case_dict['expType']}
+ data_dict = {'casename':case_dict['CASE'],
+ 'queryType':'checkCaseExists',
+ 'expType':case_dict['expType']}
data = json.dumps(data_dict)
params = urllib.urlencode(dict(username=username, password=password, data=data))
try:
- context = ssl._create_unverified_context()
- response = urllib2.urlopen(url=_query_expdb_url, data=params, context=context)
+ response = urllib2.urlopen(url=case_dict['query_expdb_url'], data=params, context=_context)
html = response.read()
if html.find('True'):
exists = True
except urllib2.HTTPError as http_e:
- logger.info('ERROR archive_metadata HTTP post failed "{0} - {1}"'.format(http_e.code, http_e.code))
+ logger.info('ERROR archive_metadata HTTP post failed "%s"', http_e.code)
+ sys.exit(1)
except urllib2.URLError as url_e:
- logger.info('ERROR archive_metadata URL failed "{0}"'.format(url_e.reason))
+ logger.info('ERROR archive_metadata URL failed "%s"', url_e.reason)
+ sys.exit(1)
return exists
@@ -517,8 +643,8 @@ def check_expdb_case(case_dict, username, password):
def query_expdb_cmip6(case_dict, username, password):
# ---------------------------------------------------------------------
""" query_exp_case
- Query the expdb for CMIP6 casename = case_dict['q_casename'] metadata.
- Write out a json file to case_dict['q_outfile'].
+ Query the expdb for CMIP6 casename = case_dict['q_casename'] metadata.
+ Write out a json file to case_dict['q_outfile'].
Arguments:
case_dict (dict) - case dictionary to store XML variables
@@ -528,34 +654,34 @@ def query_expdb_cmip6(case_dict, username, password):
"""
logger.debug('query_expdb_cmip6')
exists = False
- data_dict = {'casename':case_dict['q_casename'], 'queryType':'CMIP6GlobalAtts', 'expType':'CMIP6'}
+ data_dict = {'casename':case_dict['q_casename'],
+ 'queryType':'CMIP6GlobalAtts',
+ 'expType':'CMIP6'}
data = json.dumps(data_dict)
params = urllib.urlencode(dict(username=username, password=password, data=data))
try:
- context = ssl._create_unverified_context()
- response = urllib2.urlopen(url=_query_expdb_url, data=params, context=context)
+ response = urllib2.urlopen(url=case_dict['query_expdb_url'], data=params, context=_context)
output = json.load(response)
- exists = True
except urllib2.HTTPError as http_e:
- logger.info('ERROR archive_metadata HTTP post failed "{0} - {1}"'.format(http_e.code, http_e.code))
+ logger.info('ERROR archive_metadata HTTP post failed "%s"', http_e.code)
except urllib2.URLError as url_e:
- logger.info('ERROR archive_metadata URL failed "{0}"'.format(url_e.reason))
+ logger.info('ERROR archive_metadata URL failed "%s"', url_e.reason)
if output:
if not os.path.exists('{0}/archive_files'.format(case_dict['workdir'])):
os.makedirs('{0}/archive_files'.format(case_dict['workdir']))
filename = '{0}/archive_files/{1}'.format(case_dict['workdir'], case_dict['q_outfile'])
- with io.open(filename, 'w+', encoding='utf-8') as f:
- f.write(json.dumps(output, ensure_ascii=False))
- f.close()
-
+ with io.open(filename, 'w+', encoding='utf-8') as fname:
+ fname.write(json.dumps(output, ensure_ascii=False))
+ fname.close()
+ exists = True
return exists
# ---------------------------------------------------------------------
-def create_JSON(case_dict):
+def create_json(case_dict):
# ---------------------------------------------------------------------
- """ create_JSON
+ """ create_json
Create a JSON file in the caseroot/archive_files dir.
Arguments:
@@ -566,16 +692,17 @@ def create_JSON(case_dict):
if not os.path.exists('{0}/archive_files'.format(case_dict['workdir'])):
os.makedirs('{0}/archive_files'.format(case_dict['workdir']))
- filename = '{0}/archive_files/json.{1}'.format(case_dict['workdir'], _now)
- with io.open(filename, 'w', encoding='utf-8') as f:
- f.write(unicode(json.dumps(case_dict, indent=4, sort_keys=True, ensure_ascii=True)))
- f.close()
+ filename = '{0}/archive_files/json.{1}'.format(case_dict['workdir'],
+ datetime.datetime.now().strftime('%Y%m%d-%H%M%S'))
+ with io.open(filename, 'w', encoding='utf-8') as fname:
+ fname.write(unicode(json.dumps(case_dict, indent=4, sort_keys=True, ensure_ascii=True)))
+ fname.close()
# ---------------------------------------------------------------------
-def post_JSON(case_dict, username, password):
+def post_json(case_dict, username, password):
# ---------------------------------------------------------------------
- """ post_JSON
- Post a JSON file in the caseroot/archive_files to the
+ """ post_json
+ Post a JSON file in the caseroot/archive_files to the
remote expdb URL.
Arguments:
@@ -590,13 +717,12 @@ def post_JSON(case_dict, username, password):
data = json.dumps(case_dict)
params = urllib.urlencode(dict(username=username, password=password, data=data))
try:
- context = ssl._create_unverified_context()
- response = urllib2.urlopen(url=_JSON_expdb_url, data=params, context=context)
+ urllib2.urlopen(url=case_dict['json_expdb_url'], data=params, context=_context)
except urllib2.HTTPError as http_e:
- logger.info('ERROR archive_metadata HTTP post failed "{0} - {1}"'.format(http_e.code, http_e.code))
+ logger.info('ERROR archive_metadata HTTP post failed "%s"', http_e.code)
except urllib2.URLError as url_e:
- logger.info('ERROR archive_metadata URL failed "{0}"'.format(url_e.reason))
-
+ logger.info('ERROR archive_metadata URL failed "%s"', url_e.reason)
+
# ---------------------------------------------------------------------
def check_svn():
# ---------------------------------------------------------------------
@@ -606,21 +732,23 @@ def check_svn():
"""
logger.debug('check_svn')
- cmd = ['svn','--version']
+ cmd = ['svn', '--version']
svn_exists = True
- try:
+ result = ''
+ try:
result = subprocess.check_output(cmd)
- if 'version' not in result:
- msg = 'SVN is not available. Ignoring SVN update'
- raise SVNException(msg)
- svn_exists = False
- except subprocess.CalledProcessError as e:
- msg = _SVNError_template.substitute(function='check_svn',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ except subprocess.CalledProcessError as error:
+ msg = _svn_error_template.substitute(function='check_svn', cmd=cmd,
+ error=error.returncode, strerror=error.output)
+ svn_exists = False
logger.info(msg)
raise SVNException(msg)
+
+ if 'version' not in result:
+ msg = 'SVN is not available. Ignoring SVN update'
svn_exists = False
-
+ raise SVNException(msg)
+
return svn_exists
# ---------------------------------------------------------------------
@@ -630,43 +758,17 @@ def create_temp_archive(case_dict):
Create a temporary SVN sandbox directory in the current caseroot
"""
+ archive_temp_dir = '{0}/archive_temp_dir'.format(case_dict['workdir'])
+ logger.debug('create_temp_archive %s', archive_temp_dir)
- c = datetime.datetime.now()
- temp_archive_dir = '{0}/temp_archive_dir_{1}{2}{3}_{4}{5}'.format(case_dict['workdir'], c.year,
- str(c.month).zfill(2),
- str(c.day).zfill(2),
- str(c.hour).zfill(2),
- str(c.minute).zfill(2))
-
- logger.debug('create_temp_archive %s',temp_archive_dir)
-
- if not os.path.exists(temp_archive_dir):
- os.makedirs(temp_archive_dir)
-
- return temp_archive_dir
-
-# ---------------------------------------------------------------------
-def get_PBS_files(case_dict):
-# ---------------------------------------------------------------------
- """ return a list of PBS output files from a CESM case
- """
- logger.debug('get_PBS_files')
-
- files = list()
- tmp_files = list()
-
- os.chdir(case_dict['CASEROOT'])
- patterns = ['{0}.run.o*'.format(case_dict['CASE']),
- '{0}.run.e*'.format(case_dict['CASE']),
- '{0}.st_archive.o*'.format(case_dict['CASE']),
- '{0}.st_archive.e*'.format(case_dict['CASE']) ]
+ if not os.path.exists(archive_temp_dir):
+ os.makedirs(archive_temp_dir)
+ else:
+ logger.info('ERROR archive_metadata archive_temp_dir already exists. exiting...')
+ sys.exit(1)
- for pattern in patterns:
- tmp_files = list()
- tmp_files = glob.glob(pattern)
- files.append(tmp_files)
+ return archive_temp_dir
- return files
# ---------------------------------------------------------------------
def check_svn_repo(case_dict, username, password):
@@ -679,15 +781,17 @@ def check_svn_repo(case_dict, username, password):
repo_exists = False
svn_repo = '{0}/trunk'.format(case_dict['svn_repo_url'])
- cmd = ['svn','list', svn_repo, '--username', username, '--password', password]
+ cmd = ['svn', 'list', svn_repo, '--username', username, '--password', password]
+ result = ''
try:
result = subprocess.check_output(cmd)
- if re.search('README.archive', result):
- repo_exists = True
- except subprocess.CalledProcessError as e:
+ except subprocess.CalledProcessError:
msg = 'SVN repo does not exist for this case. A new one will be created.'
logger.warning(msg)
+ if re.search('README.archive', result):
+ repo_exists = True
+
return repo_exists
# ---------------------------------------------------------------------
@@ -701,17 +805,21 @@ def get_trunk_tag(case_dict, username, password):
tag = 0
svn_repo = '{0}/trunk_tags'.format(case_dict['svn_repo_url'])
- cmd = ['svn','list', svn_repo, '--username', username, '--password', password]
- try:
+ cmd = ['svn', 'list', svn_repo, '--username', username, '--password', password]
+ result = ''
+ try:
result = subprocess.check_output(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', 'list', svn_repo, '--username', username, '--password', '******']
+ msg = _call_template.substitute(function='get_trunk_tag', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
+ logger.warning(msg)
+ raise SVNException(msg)
+
+ if result:
last_tag = [i for i in result.split('\n') if i][-1]
last_tag = last_tag[:-1].split('_')[-1]
tag = int(last_tag.strip('0'))
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='get_trunk_tag',cmd=cmd,
- error=e.returncode, strerror=e.output)
- logger.warning(msg)
- raise SVNException(msg)
return tag
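
As a worked example of the tag parsing above, given a hypothetical 'svn list' entry:

    last_tag = 'mycase_tag_0005/'
    last_tag = last_tag[:-1].split('_')[-1]   # drop trailing '/', keep '0005'
    print(int(last_tag.strip('0')))           # -> 5

Note that str.strip('0') also removes trailing zeros, so a tag such as 0010 would parse as 1 rather than 10.
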
@@ -726,12 +834,13 @@ def checkout_repo(case_dict, username, password):
os.chdir(case_dict['archive_temp_dir'])
svn_repo = '{0}/trunk'.format(case_dict['svn_repo_url'])
- cmd = ['svn', 'co', '--username', username,'--password', password, svn_repo, '.']
+ cmd = ['svn', 'co', '--username', username, '--password', password, svn_repo, '.']
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='checkout_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', 'co', '--username', username, '--password', '******', svn_repo, '.']
+ msg = _call_template.substitute(function='checkout_repo', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
@@ -745,74 +854,86 @@ def create_readme(case_dict):
Create a generic README.archive file
"""
logger.debug('create_readme')
-
os.chdir(case_dict['archive_temp_dir'])
-
- f = open('README.archive','w')
- f.write('Archived metadata is available for this case at URL:\n')
- f.write(_expdb_url)
- f.close()
+
+ fname = open('README.archive', 'w')
+ fname.write('Archived metadata is available for this case at URL:\n')
+ fname.write(case_dict['base_expdb_url'])
+ fname.close()
# ---------------------------------------------------------------------
def update_repo_add_file(filename, dir1, dir2):
# ---------------------------------------------------------------------
+ """ update_repo_add_file
+
+ Add a file to the SVN repository
+ """
src = os.path.join(dir1, filename)
dest = os.path.join(dir2, filename)
logger.debug('left_only: '+src+' -> '+dest)
shutil.copy2(src, dest)
cmd = ['svn', 'add', dest]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='update_lcoal_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='update_repo_add_file', cmd=cmd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
# ---------------------------------------------------------------------
def update_repo_rm_file(filename, dir1, dir2):
# ---------------------------------------------------------------------
+ """ update_repo_rm_file
+
+ Remove a file from the SVN repository
+ """
src = os.path.join(dir2, filename)
dest = os.path.join(dir1, filename)
logger.debug('right_only: '+src+' -> '+dest)
if os.path.exists(dest):
cmd = ['svn', 'rm', dest]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='update_lcoal_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='update_repo_rm_file', cmd=cmd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
# ---------------------------------------------------------------------
def update_repo_copy_file(filename, dir1, dir2):
# ---------------------------------------------------------------------
+ """ update_repo_copy_file
+
+ Copy a file into the SVN local repo
+ """
src = os.path.join(dir1, filename)
dest = os.path.join(dir2, filename)
shutil.copy2(src, dest)
# ---------------------------------------------------------------------
-def compare_dir_trees(dir1, dir2, exclude_files, exclude_dirs):
+def compare_dir_trees(dir1, dir2, archive_list):
# ---------------------------------------------------------------------
- """
+ """ compare_dir_trees
+
Compare two directories recursively. Files in each directory are
assumed to be equal if their names and contents are equal.
"""
- dirs_cmp = filecmp.dircmp(dir1, dir2, exclude_files)
-
- tmp_lf = [fn for fn in dirs_cmp.left_only if fn not in exclude_dirs]
- lf = [fn for fn in tmp_lf if fn not in exclude_files]
-
- tmp_rf = [fn for fn in dirs_cmp.right_only if fn not in exclude_dirs]
- rf = [fn for fn in tmp_rf if fn not in exclude_files]
+ xml_files = [os.path.basename(fn) for fn in glob.glob(os.path.join(dir1, '*.xml'))]
+ user_nl_files = [os.path.basename(fn) for fn in glob.glob(os.path.join(dir1, 'user_nl_*'))]
+ dirs_cmp = filecmp.dircmp(dir1, dir2, _ignore_patterns)
- tmp_ff = [fn for fn in dirs_cmp.funny_files if fn not in exclude_dirs]
- ff = [fn for fn in tmp_ff if fn not in exclude_files]
+ left_only = [fn for fn in dirs_cmp.left_only if not os.path.islink(os.path.join(dir1, fn))
+ and (fn in xml_files or fn in user_nl_files or fn in archive_list)]
+ right_only = [fn for fn in dirs_cmp.right_only if not os.path.islink(os.path.join(dir2, fn))
+ and (fn in xml_files or fn in user_nl_files or fn in archive_list)]
+ funny_files = [fn for fn in dirs_cmp.funny_files if not os.path.islink(os.path.join(dir1, fn))
+ and (fn in xml_files or fn in user_nl_files or fn in archive_list)]
# files and directories need to be added to svn repo from the caseroot
- if len(lf)>0:
- for filename in lf:
+ if left_only:
+ for filename in left_only:
if os.path.isfile(os.path.join(dir1, filename)) and filename[-1] != '~':
update_repo_add_file(filename, dir1, dir2)
else:
@@ -821,198 +942,246 @@ def compare_dir_trees(dir1, dir2, exclude_files, exclude_dirs):
os.makedirs(new_dir2)
cmd = ['svn', 'add', '--depth=empty', new_dir2]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='update_lcoal_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='compare_dir_trees', cmd=cmd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
# recurse through this new subdir
- compare_dir_trees(new_dir1, new_dir2, exclude_files, exclude_dirs)
+ archive_list = archive_list + [filename]
+ compare_dir_trees(new_dir1, new_dir2, archive_list)
# files need to be removed from svn repo that are no longer in the caseroot
- if len(rf)>0:
- for filename in rf:
+ if right_only:
+ for filename in right_only:
if os.path.isfile(os.path.join(dir1, filename)) and filename[-1] != '~':
update_repo_rm_file(filename, dir1, dir2)
# files are the same but could not be compared so copy the caseroot version
- if len(ff)>0:
- for filename in ff:
+ if funny_files:
+ for filename in funny_files:
if os.path.isfile(os.path.join(dir1, filename)) and filename[-1] != '~':
update_repo_copy_file(filename, dir1, dir2)
# common files have changed in the caseroot and need to be copied to the svn repo
- (_, mismatch, errors) = filecmp.cmpfiles(
+ (_, mismatch, errors) = filecmp.cmpfiles(
dir1, dir2, dirs_cmp.common_files, shallow=False)
- if len(mismatch)>0:
- for filename in mismatch :
+ if mismatch:
+ for filename in mismatch:
if os.path.isfile(os.path.join(dir1, filename)) and filename[-1] != '~':
update_repo_copy_file(filename, dir1, dir2)
# error in file comparison so copy the caseroot file to the svn repo
- if len(errors)>0:
- for filename in errors :
+ if errors:
+ for filename in errors:
if os.path.isfile(os.path.join(dir1, filename)) and filename[-1] != '~':
update_repo_copy_file(filename, dir1, dir2)
# recurse through the subdirs
common_dirs = dirs_cmp.common_dirs
- if len(common_dirs)>0:
+ if common_dirs:
for common_dir in common_dirs:
- if common_dir not in exclude_dirs:
+ if common_dir in archive_list:
new_dir1 = os.path.join(dir1, common_dir)
new_dir2 = os.path.join(dir2, common_dir)
- compare_dir_trees(new_dir1, new_dir2, exclude_files, exclude_dirs)
+ compare_dir_trees(new_dir1, new_dir2, archive_list)
else:
return
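
For reference, a quick illustration of the filecmp.dircmp attributes used above, with hypothetical paths:

    import filecmp

    dcmp = filecmp.dircmp('/path/to/caseroot', '/path/to/archive_temp_dir')
    print(dcmp.left_only)     # only in caseroot -> candidates for 'svn add'
    print(dcmp.right_only)    # only in the sandbox -> candidates for 'svn rm'
    print(dcmp.common_files)  # compared byte-for-byte via filecmp.cmpfiles
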
# ---------------------------------------------------------------------
-def update_local_repo(case_dict, ignoreLogs):
+def update_local_repo(case_dict, ignore_logs, ignore_timing):
# ---------------------------------------------------------------------
""" update_local_repo
-
+
Compare and update local SVN sandbox
- """
+ """
logger.debug('update_local_repo')
-
- archive_temp_dir = os.path.basename(os.path.normpath(case_dict['archive_temp_dir']))
from_dir = case_dict['CASEROOT']
to_dir = case_dict['archive_temp_dir']
- # append the archive_temp_dir to the exclude dirs list
- exclude_dirs = _EXCLUDE_DIRS
- exclude_dirs.append(archive_temp_dir)
-
- # append all the PBS output files to the exclude files list
- pbs_files = get_PBS_files(case_dict)
- exclude_files = _EXCLUDE_FILES + pbs_files
-
- compare_dir_trees(from_dir, to_dir, exclude_files, exclude_dirs)
+ compare_dir_trees(from_dir, to_dir, case_dict['archive_list'])
- # check if ignoreLogs is specified
- if ignoreLogs:
+ # check if ignore_logs is specified
+ if ignore_logs:
os.chdir(case_dict['archive_temp_dir'])
if os.path.isdir('./logs'):
try:
shutil.rmtree('./logs')
- except OSError as e:
+ except OSError:
logger.warning('in "update_local_repo" - Unable to remove "logs" in archive dir.')
cmd = ['svn', 'delete', './logs']
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='update_lcoal_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='update_local_repo', cmd=cmd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
-
+
if os.path.isdir('./postprocess/logs'):
os.chdir('./postprocess')
try:
shutil.rmtree('./logs')
- except OSError as e:
- logger.warning('in "update_local_repo" - Unable to remove "postprocess/logs" in archive dir.')
+ except OSError:
+ logger.warning('in "update_local_repo" - '\
+ 'Unable to remove "postprocess/logs" in archive dir.')
cmd = ['svn', 'delete', './logs']
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='update_lcoal_repo',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='update_local_repo', cmd=cmd,
+ error=error.returncode, strerror=error.output)
+ logger.warning(msg)
+ raise SVNException(msg)
+
+ # check if ignore_timing is specified
+ if ignore_timing:
+ os.chdir(case_dict['archive_temp_dir'])
+ if os.path.isdir('./timing'):
+ try:
+ shutil.rmtree('./timing')
+ except OSError:
+ logger.warning('in "update_local_repo" - Unable to remove "timing" in archive dir.')
+
+ cmd = ['svn', 'delete', './timing']
+ try:
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ msg = _call_template.substitute(function='update_local_repo', cmd=cmd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
# ---------------------------------------------------------------------
-def populate_local_repo(case_dict, ignoreLogs):
+def populate_local_repo(case_dict, ignore_logs, ignore_timing):
# ---------------------------------------------------------------------
""" populate_local_repo
Populate local SVN sandbox
"""
logger.debug('populate_local_repo')
-
os.chdir(case_dict['CASEROOT'])
- # loop through the _archive_list and copy to the temp archive dir
- for archive in _archive_list:
+ # loop through the archive_list and copy to the temp archive dir
+ for archive in case_dict['archive_list']:
if os.path.exists(archive):
if os.path.isdir(archive):
try:
target = case_dict['archive_temp_dir']+'/'+archive
- shutil.copytree(archive, target, symlinks=False,
- ignore=shutil.ignore_patterns(*_IGNORE_PATTERNS))
- except OSError as e:
+ shutil.copytree(archive, target, symlinks=False,
+ ignore=shutil.ignore_patterns(*_ignore_patterns))
+ except OSError as error:
msg = _copy_template.substitute(function='populate_local_repo',
- source=archive, dest=case_dict['archive_temp_dir'],
- error=e.errno, strerror=e.strerror)
+ source=archive,
+ dest=case_dict['archive_temp_dir'],
+ error=error.errno,
+ strerror=error.strerror)
logger.warning(msg)
else:
try:
shutil.copy2(archive, case_dict['archive_temp_dir'])
- except OSError as e:
+ except OSError as error:
msg = _copy_template.substitute(function='populate_local_repo',
- source=archive, dest=case_dict['archive_temp_dir'],
- error=e.errno, strerror=e.strerror)
+ source=archive,
+ dest=case_dict['archive_temp_dir'],
+ error=error.errno,
+ strerror=error.strerror)
logger.warning(msg)
- # add files with .xml as the prefix
+ # add files with .xml as the suffix
xml_files = glob.glob('*.xml')
for xml_file in xml_files:
if os.path.isfile(xml_file):
try:
shutil.copy2(xml_file, case_dict['archive_temp_dir'])
- except OSError as e:
+ except OSError as error:
msg = _copy_template.substitute(function='populate_local_repo',
- source=xml_file, dest=case_dict['archive_temp_dir'],
- error=e.errno, strerror=e.strerror)
+ source=xml_file,
+ dest=case_dict['archive_temp_dir'],
+ error=error.errno,
+ strerror=error.strerror)
logger.warning(msg)
-
- # add files with user_ as the suffix
- user_files = glob.glob('user_*')
+
+ # add files with .xml as the suffix from the postprocess directory
+ if os.path.isdir('./postprocess'):
+ pp_path = '{0}/{1}'.format(case_dict['archive_temp_dir'], 'postprocess')
+ if not os.path.exists(pp_path):
+ os.mkdir(pp_path)
+ xml_files = glob.glob('./postprocess/*.xml')
+ for xml_file in xml_files:
+ if os.path.isfile(xml_file):
+ try:
+ shutil.copy2(xml_file, pp_path)
+ except OSError as error:
+ msg = _copy_template.substitute(function='populate_local_repo',
+ source=xml_file,
+ dest=pp_path,
+ error=error.errno,
+ strerror=error.strerror)
+ logger.warning(msg)
+
+ # add files with user_nl_ as the prefix
+ user_files = glob.glob('user_nl_*')
for user_file in user_files:
if os.path.isfile(user_file):
try:
shutil.copy2(user_file, case_dict['archive_temp_dir'])
- except OSError as e:
+ except OSError as error:
msg = _copy_template.substitute(function='populate_local_repo',
- source=user_file, dest=case_dict['archive_temp_dir'],
- error=e.errno, strerror=e.strerror)
+ source=user_file,
+ dest=case_dict['archive_temp_dir'],
+ error=error.errno,
+ strerror=error.strerror)
logger.warning(msg)
-
- # add files with Depends as the suffix
+ # add files with Depends as the prefix
conf_files = glob.glob('Depends.*')
for conf_file in conf_files:
if os.path.isfile(conf_file):
try:
shutil.copy2(conf_file, case_dict['archive_temp_dir'])
- except OSError as e:
+ except OSError as error:
msg = _copy_template.substitute(function='populate_local_repo',
- source=conf_file, dest=case_dict['archive_temp_dir'],
- error=e.errno, strerror=e.strerror)
+ source=conf_file,
+ dest=case_dict['archive_temp_dir'],
+ error=error.errno,
+ strerror=error.strerror)
logger.warning(msg)
- # check if ignoreLogs is specified
- if ignoreLogs:
+ # check if ignore_logs is specified
+ if ignore_logs:
os.chdir(case_dict['archive_temp_dir'])
if os.path.isdir('./logs'):
try:
shutil.rmtree('./logs')
- except OSError as e:
- logger.warning('in "populate_local_repo" - Unable to remove "logs" in archive dir.')
-
+ except OSError:
+ logger.warning('in "populate_local_repo" - Unable to remove "logs" in archive_temp_dir.')
if os.path.isdir('./postprocess/logs'):
os.chdir('./postprocess')
try:
shutil.rmtree('./logs')
- except OSError as e:
- logger.warning('in "populate_local_repo" - Unable to remove "postprocess/logs" in archive dir.')
+ except OSError:
+ logger.warning('in "populate_local_repo" - ' \
+ 'Unable to remove "postprocess/logs" in archive_temp_dir.')
+ os.chdir(case_dict['CASEROOT'])
+
+ # check if ignore_timing is specified
+ if ignore_timing:
+ os.chdir(case_dict['archive_temp_dir'])
+ if os.path.isdir('./timing'):
+ try:
+ shutil.rmtree('./timing')
+ except OSError:
+ logger.warning('in "populate_local_repo" - Unable to remove "timing" in archive_temp_dir.')
+ os.chdir(case_dict['CASEROOT'])
+
# ---------------------------------------------------------------------
-def checkin_trunk(case_dict, svn_cmd, message, username, password):
+def checkin_trunk(case_dict, svn_cmd, message, username, password):
# ---------------------------------------------------------------------
""" checkin_trunk
@@ -1023,41 +1192,52 @@ def checkin_trunk(case_dict, svn_cmd, message, username, password):
os.chdir(case_dict['archive_temp_dir'])
svn_repo = '{0}/trunk'.format(case_dict['svn_repo_url'])
msg = '"{0}"'.format(message)
- cmd = ['svn', svn_cmd, '--username', username, '--password', password, '.', '--message', msg]
+ cmd = ['svn', svn_cmd, '--username', username,
+ '--password', password, '.', '--message', msg]
if svn_cmd in ['import']:
# create the trunk dir
msg = '"create trunk"'
- cmd = ['svn', 'mkdir', '--parents', svn_repo, '--username', username, '--password', password, '--message', msg]
+ cmd = ['svn', 'mkdir', '--parents', svn_repo,
+ '--username', username, '--password', password, '--message', msg]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='checkin_trunk',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', 'mkdir', '--parents', svn_repo,
+ '--username', username, '--password', '******',
+ '--message', msg]
+ msg = _call_template.substitute(function='checkin_trunk', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
# create the trunk_tags dir
tags = '{0}/trunk_tags'.format(case_dict['svn_repo_url'])
msg = '"create trunk_tags"'
- cmd = ['svn', 'mkdir', tags, '--username', username, '--password', password, '--message', msg]
+ cmd = ['svn', 'mkdir', tags, '--username', username,
+ '--password', password, '--message', msg]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='checkin_trunk',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', 'mkdir', tags, '--username', username,
+ '--password', '******', '--message', msg]
+ msg = _call_template.substitute(function='checkin_trunk', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
msg = '"{0}"'.format(message)
- cmd = ['svn', svn_cmd, '--username', username, '--password', password, '.', svn_repo, '--message', msg]
+ cmd = ['svn', svn_cmd, '--username', username, '--password', password, '.',
+ svn_repo, '--message', msg]
# check-in the trunk to svn
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='checkin_trunk',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', svn_cmd, '--username', username,
+ '--password', '******', '.', svn_repo, '--message', msg]
+ msg = _call_template.substitute(function='checkin_trunk', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
@@ -1073,20 +1253,23 @@ def create_tag(case_dict, new_tag, username, password):
# create a new trunk tag
os.chdir(case_dict['archive_temp_dir'])
svn_repo = '{0}/trunk'.format(case_dict['svn_repo_url'])
- svn_repo_tag = '{0}/trunk_tags/{1}'.format(case_dict['svn_repo_url'],new_tag)
+ svn_repo_tag = '{0}/trunk_tags/{1}'.format(case_dict['svn_repo_url'], new_tag)
msg = '"create new trunk tag"'
- cmd = ['svn', 'copy', '--username', username, '--password', password, svn_repo, svn_repo_tag, '--message', msg]
+ cmd = ['svn', 'copy', '--username', username, '--password', password,
+ svn_repo, svn_repo_tag, '--message', msg]
try:
- result = subprocess.check_call(cmd)
- except subprocess.CalledProcessError as e:
- msg = _call_template.substitute(function='checkin_trunk',cmd=cmd,
- error=e.returncode, strerror=e.output)
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as error:
+ cmd_nopasswd = ['svn', 'copy', '--username', username, '--password', '******',
+ svn_repo, svn_repo_tag, '--message', msg]
+ msg = _call_template.substitute(function='create_tag', cmd=cmd_nopasswd,
+ error=error.returncode, strerror=error.output)
logger.warning(msg)
raise SVNException(msg)
-# ---------------------------------------------------------------------
-def update_repo(ignoreLogs, case_dict, username, password):
-# ---------------------------------------------------------------------
+# -------------------------------------------------------------------------
+def update_repo(ignore_logs, ignore_timing, case_dict, username, password):
+# -------------------------------------------------------------------------
""" update_repo
Update SVN repo
@@ -1099,33 +1282,35 @@ def update_repo(ignoreLogs, case_dict, username, password):
if svn_exists:
# check if the case repo exists
- case_dict['svn_repo_url'] = '{0}/{1}'.format(_SVN_expdb_url, case_dict['CASE'])
+ case_dict['svn_repo_url'] = '{0}/{1}'.format(_svn_expdb_url, case_dict['CASE'])
repo_exists = check_svn_repo(case_dict, username, password)
case_dict['archive_temp_dir'] = create_temp_archive(case_dict)
+ case_dict['archive_list'] = _archive_list + case_dict['user_add_files']
if repo_exists:
- # need to update trunk and make a new tag
+ # update trunk and make a new tag
last_tag = get_trunk_tag(case_dict, username, password)
- new_tag = '{0}_{1}'.format(case_dict['CASE'],str(last_tag+1).zfill(4))
+ new_tag = '{0}_{1}'.format(case_dict['CASE'], str(last_tag+1).zfill(4))
checkout_repo(case_dict, username, password)
- update_local_repo(case_dict, ignoreLogs)
- msg = 'update case metadata for {0} by {1}'.format(case_dict['CASE'],username)
+ update_local_repo(case_dict, ignore_logs, ignore_timing)
+ msg = 'update case metadata for {0} by {1}'.format(case_dict['CASE'], username)
checkin_trunk(case_dict, 'ci', msg, username, password)
create_tag(case_dict, new_tag, username, password)
logger.info('SVN repository trunk updated at URL "%s"', case_dict['svn_repo_url'])
- logger.info(' and a new trunk tag created for "%s"', new_tag)
+ logger.info(' and a new trunk tag created "%s"', new_tag)
else:
# create a new case repo
new_tag = '{0}_0001'.format(case_dict['CASE'])
create_readme(case_dict)
- populate_local_repo(case_dict, ignoreLogs)
- msg = 'initial import of case metadata for {0} by {1}'.format(case_dict['CASE'],username)
+ populate_local_repo(case_dict, ignore_logs, ignore_timing)
+ msg = ('initial import of case metadata for {0} by {1}'
+ .format(case_dict['CASE'], username))
checkin_trunk(case_dict, 'import', msg, username, password)
create_tag(case_dict, new_tag, username, password)
logger.info('SVN repository imported to trunk URL "%s"', case_dict['svn_repo_url'])
logger.info(' and a new trunk tag created for "%s"', new_tag)
- except SVNException as e:
+ except SVNException:
pass
return case_dict
@@ -1151,16 +1336,17 @@ def get_timing_data(case_dict):
if os.path.exists(timing_dir):
# check if timing files exists
timing_file_pattern = 'cesm_timing.'+case_dict['CASE']
- last_time = max(glob.glob(timing_dir+'/'+timing_file_pattern+'.*'), key=os.path.getctime)
- if len(last_time) > 0:
+ last_time = max(glob.glob(timing_dir+'/'+timing_file_pattern+'.*'),
+ key=os.path.getctime)
+ if last_time:
if 'gz' in last_time:
# gunzip file first
- with gzip.open(last_time, 'rb') as f:
- file_content = f.readlines()
+ with gzip.open(last_time, 'rb') as fname:
+ file_content = fname.readlines()
else:
- with open(last_time, 'r') as f:
- file_content = f.readlines()
-
+ with open(last_time, 'r') as fname:
+ file_content = fname.readlines()
+
# search the file content for matching lines
model_cost = [line for line in file_content if 'Model Cost:' in line]
model_throughput = [line for line in file_content if 'Model Throughput:' in line]
@@ -1211,34 +1397,44 @@ def initialize_main(options):
case_dict['archive_temp_dir'] = ''
+ case_dict['user_add_files'] = list()
+ if options.user_add_files:
+ case_dict['user_add_files'] = options.user_add_files.split(',')
+
case_dict['q_casename'] = ''
case_dict['q_outfile'] = ''
if options.query_cmip6:
case_dict['q_casename'] = options.query_cmip6[0]
case_dict['q_outfile'] = options.query_cmip6[1]
+ case_dict['base_expdb_url'] = 'https://csegweb.cgd.ucar.edu/expdb2.0'
+ if options.test_post:
+ case_dict['base_expdb_url'] = 'https://csegwebdev.cgd.ucar.edu/expdb2.0'
+ case_dict['json_expdb_url'] = case_dict['base_expdb_url'] + '/cgi-bin/processJSON.cgi'
+ case_dict['query_expdb_url'] = case_dict['base_expdb_url'] + '/cgi-bin/query.cgi'
+
return case_dict, username, password
# ---------------------------------------------------------------------
-def main(options):
+def main_func(options):
# ---------------------------------------------------------------------
- """ main
+ """ main function
Arguments:
options (list) - input options from command line
"""
- logger.debug('main')
+ logger.debug('main_func')
(case_dict, username, password) = initialize_main(options)
- # loop through the _XML_vars gathering values
+ # loop through the _xml_vars gathering values
with Case(case_dict['CASEROOT'], read_only=True) as case:
if case_dict['dryrun']:
logger.info('Dryrun - calling get_case_vars')
else:
case_dict = get_case_vars(case_dict, case)
- # get the case status into the case_dict
+ # get the case status into the case_dict
if case_dict['dryrun']:
logger.info('Dryrun - calling get_case_status')
else:
@@ -1250,77 +1446,77 @@ def main(options):
logger.info('Dryrun - calling query_expdb_cmip6 for case metadata')
else:
if query_expdb_cmip6(case_dict, username, password):
- logger.info('{0} experiments database CMIP6 global attribute metadata written to {1}'.format(case_dict['q_casename'], case_dict['q_outfile']))
+ logger.info('Casename "%s" CMIP6 global attribute '\
+ 'metadata written to "./archive_files/%s" ' \
+ 'from "%s"',
+ case_dict['q_casename'], case_dict['q_outfile'],
+ case_dict['query_expdb_url'])
logger.info('Successful completion of archive_metadata')
sys.exit(0)
else:
- logger.info('ERROR archive_metadata failed to find {0} in experiments database.'.format(case_dict['q_casename']))
+ logger.info('ERROR archive_metadata failed to find "%s" '\
+ 'in experiments database at "%s".',
+ case_dict['q_casename'], case_dict['query_expdb_url'])
sys.exit(1)
-
- # check that the casename is reserved in the expdb
- # for CMIP6 experiments
- if (case_dict['expType'].lower() == 'cmip6'):
+
+ # check the casename is reserved in the expdb for CMIP6 experiments
+ if case_dict['expType'].lower() == 'cmip6':
if case_dict['dryrun']:
logger.info('Dryrun - calling check_expdb_case for CMIP6 experiment reservation')
else:
if not check_expdb_case(case_dict, username, password):
- logger.info('Unable to archive CMIP6 metadata. '
- '"%s" casename does not exist in database. '
- 'All CMIP6 experiments casenames must be '
- 'reserved in the experiments database at URL: '
- 'https://csegweb.cgd.ucar.edu/expdb2.0 '
- 'prior to running archive_metadata.',case_dict['CASE'])
+ logger.info('Unable to archive CMIP6 metadata. '\
+ '"%s" casename does not exist in database. '\
+ 'All CMIP6 experiments casenames must be '\
+ 'reserved in the experiments database at URL: '\
+ 'https://csegweb.cgd.ucar.edu/expdb2.0 '\
+ 'prior to running archive_metadata.', case_dict['CASE'])
sys.exit(1)
# create / update the cesm expdb repo with the caseroot files
- if not options.ignoreRepoUpdate:
+ if not options.ignore_repo_update:
if case_dict['dryrun']:
logger.info('Dryrun - calling update_repo')
else:
- case_dict = update_repo(options.ignoreLogs, case_dict,
- username, password)
+ case_dict = update_repo(options.ignore_logs, options.ignore_timing,
+ case_dict, username, password)
# parse the timing data into the case_dict
- if not options.ignoreTiming:
+ if not options.ignore_timing:
if case_dict['dryrun']:
logger.info('Dryrun - calling get_timing_data')
else:
case_dict = get_timing_data(case_dict)
- # create a JSON file containing the case_dict with the date appended to the filename
+ # Create a JSON file containing the case_dict with the date appended to the filename
if case_dict['dryrun']:
- logger.info('Dryrun - calling create_JSON')
+ logger.info('Dryrun - calling create_json')
else:
- create_JSON(case_dict)
+ create_json(case_dict)
# post the JSON to the remote DB
if case_dict['dryrun']:
- logger.info('Dryrun - calling post_JSON')
+ logger.info('Dryrun - calling post_json')
else:
- post_JSON(case_dict, username, password)
+ post_json(case_dict, username, password)
# clean-up the temporary archive files dir
if case_dict['dryrun']:
- logger.info('Dryrun - removing temporary directory')
+ logger.info('Dryrun - deleting "./archive_temp_dir"')
else:
- if not options.ignoreRepoUpdate and os.path.exists(case_dict['archive_temp_dir']):
+ if not options.ignore_repo_update and os.path.exists(case_dict['archive_temp_dir']):
shutil.rmtree(case_dict['archive_temp_dir'])
logger.info('Successful completion of archive_metadata')
+ return 0
-#===================================
+#===================================
if __name__ == "__main__":
- if ("--test" in sys.argv):
- test_results = doctest.testmod(verbose=True)
- sys.exit(1 if test_results.failed > 0 else 0)
-
- options = commandline_options(sys.argv)
try:
- status = main(options)
- sys.exit(status)
+ __status__ = main_func(commandline_options(sys.argv))
+ sys.exit(__status__)
except Exception as error:
- print(str(error))
+ print(str(error))
sys.exit(1)
-
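
The reworked compare_dir_trees above drives the whole sandbox sync from
filecmp.dircmp categories instead of len() tests: right_only files are
svn-deleted, funny_files and mismatched or uncomparable common files are
re-copied from the caseroot, and recursion now descends only into
directories named in archive_list. A minimal sketch of that pattern
(helper names here are illustrative, not the script's API):

    import filecmp
    import os

    def sync_dirs(src, dst, allow_dirs):
        dcmp = filecmp.dircmp(src, dst)
        # files present only on the repo side are stale
        for name in dcmp.right_only:
            print('svn delete', os.path.join(dst, name))
        # changed or uncomparable common files are refreshed from src
        _, mismatch, errors = filecmp.cmpfiles(src, dst, dcmp.common_files,
                                               shallow=False)
        for name in mismatch + errors + dcmp.funny_files:
            print('copy', os.path.join(src, name))
        # recurse only into directories that are on the archive list
        for sub in dcmp.common_dirs:
            if sub in allow_dirs:
                sync_dirs(os.path.join(src, sub), os.path.join(dst, sub),
                          allow_dirs)
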
diff --git a/scripts/create_newcase b/scripts/create_newcase
index f35b36112..7121f6bb7 100755
--- a/scripts/create_newcase
+++ b/scripts/create_newcase
@@ -208,7 +208,7 @@ def _main_func(description):
case.create(casename, srcroot, compset, grid, user_mods_dir=user_mods_dir,
machine_name=machine, project=project,
pecount=pecount, compiler=compiler, mpilib=mpilib,
- pesfile=pesfile,user_grid=user_grid, gridfile=gridfile,
+ pesfile=pesfile,gridfile=gridfile,
multi_driver=multi_driver, ninst=ninst, test=test,
walltime=walltime, queue=queue, output_root=output_root,
run_unsupported=run_unsupported, answer=answer,
diff --git a/scripts/lib/CIME/case/case.py b/scripts/lib/CIME/case/case.py
index 65af650cc..6337da601 100644
--- a/scripts/lib/CIME/case/case.py
+++ b/scripts/lib/CIME/case/case.py
@@ -754,7 +754,7 @@ def _setup_mach_pes(self, pecount, multi_driver, ninst, machine_name, mpilib):
def configure(self, compset_name, grid_name, machine_name=None,
project=None, pecount=None, compiler=None, mpilib=None,
- pesfile=None,user_grid=False, gridfile=None,
+ pesfile=None, gridfile=None,
multi_driver=False, ninst=1, test=False,
walltime=None, queue=None, output_root=None,
run_unsupported=False, answer=None,
@@ -773,11 +773,10 @@ def configure(self, compset_name, grid_name, machine_name=None,
compset_name, files)
self._components = self.get_compset_components()
+
#--------------------------------------------
# grid
#--------------------------------------------
- if user_grid is True and gridfile is not None:
- self.set_value("GRIDS_SPEC_FILE", gridfile)
grids = Grids(gridfile)
gridinfo = grids.get_grid_info(name=grid_name, compset=self._compsetname)
@@ -1421,7 +1420,7 @@ def get_latest_cpl_log(self, coupler_log_path=None):
def create(self, casename, srcroot, compset_name, grid_name,
user_mods_dir=None, machine_name=None,
project=None, pecount=None, compiler=None, mpilib=None,
- pesfile=None,user_grid=False, gridfile=None,
+ pesfile=None, gridfile=None,
multi_driver=False, ninst=1, test=False,
walltime=None, queue=None, output_root=None,
run_unsupported=False, answer=None,
@@ -1436,7 +1435,7 @@ def create(self, casename, srcroot, compset_name, grid_name,
self.configure(compset_name, grid_name, machine_name=machine_name,
project=project,
pecount=pecount, compiler=compiler, mpilib=mpilib,
- pesfile=pesfile,user_grid=user_grid, gridfile=gridfile,
+ pesfile=pesfile, gridfile=gridfile,
multi_driver=multi_driver, ninst=ninst, test=test,
walltime=walltime, queue=queue,
output_root=output_root,
diff --git a/scripts/lib/CIME/nmlgen.py b/scripts/lib/CIME/nmlgen.py
index e693fe39c..9d7e012ce 100644
--- a/scripts/lib/CIME/nmlgen.py
+++ b/scripts/lib/CIME/nmlgen.py
@@ -8,6 +8,7 @@
import datetime
import re
+import hashlib
from CIME.XML.standard_module_setup import *
from CIME.namelist import Namelist, parse, \
@@ -467,17 +468,25 @@ def create_stream_file_and_update_shr_strdata_nml(self, config, #pylint:disable=
with open(stream_path, 'w') as stream_file:
stream_file.write(stream_file_text)
+
+ lines_hash = self._get_input_file_hash(data_list_path)
with open(data_list_path, 'a') as input_data_list:
for i, filename in enumerate(domain_filenames.split("\n")):
if filename.strip() == '':
continue
filepath = os.path.join(domain_filepath, filename.strip())
- input_data_list.write("domain{:d} = {}\n".format(i+1, filepath))
+ string = "domain{:d} = {}\n".format(i+1, filepath)
+ hashValue = hashlib.md5(string.rstrip().encode('utf-8')).hexdigest()
+ if hashValue not in lines_hash:
+ input_data_list.write(string)
for i, filename in enumerate(data_filenames.split("\n")):
if filename.strip() == '':
continue
filepath = os.path.join(data_filepath, filename.strip())
- input_data_list.write("file{:d} = {}\n".format(i+1, filepath))
+ string = "file{:d} = {}\n".format(i+1, filepath)
+ hashValue = hashlib.md5(string.rstrip().encode('utf-8')).hexdigest()
+ if hashValue not in lines_hash:
+ input_data_list.write(string)
self.update_shr_strdata_nml(config, stream, stream_path)
def update_shr_strdata_nml(self, config, stream, stream_path):
@@ -589,39 +598,57 @@ def create_shr_strdata_nml(self):
def get_group_variables(self, group_name):
return self._namelist.get_group_variables(group_name)
-
- def _write_input_files(self, input_data_list):
+ def _get_input_file_hash(self, data_list_path):
+ lines_hash = set()
+ if os.path.isfile(data_list_path):
+ with open(data_list_path, "r") as input_data_list:
+ for line in input_data_list:
+ hashValue = hashlib.md5(line.rstrip().encode('utf-8')).hexdigest()
+ logger.debug( "Found line {} with hash {}".format(line,hashValue))
+ lines_hash.add(hashValue)
+ return lines_hash
+
+ def _write_input_files(self, data_list_path):
"""Write input data files to list."""
- for group_name in self._namelist.get_group_names():
- for variable_name in self._namelist.get_variable_names(group_name):
- input_pathname = self._definition.get_node_element_info(variable_name, "input_pathname")
- if input_pathname is not None:
- # This is where we end up for all variables that are paths
- # to input data files.
- literals = self._namelist.get_variable_value(group_name, variable_name)
- for literal in literals:
- file_path = character_literal_to_string(literal)
- # NOTE - these are hard-coded here and a better way is to make these extensible
- if file_path == 'UNSET' or file_path == 'idmap':
- continue
- if input_pathname == 'abs':
- # No further mangling needed for absolute paths.
- # At this point, there are overwrites that should be ignored
- if not os.path.isabs(file_path):
+ # append to input_data_list file
+ lines_hash = self._get_input_file_hash(data_list_path)
+ with open(data_list_path, "a") as input_data_list:
+ for group_name in self._namelist.get_group_names():
+ for variable_name in self._namelist.get_variable_names(group_name):
+ input_pathname = self._definition.get_node_element_info(variable_name, "input_pathname")
+ if input_pathname is not None:
+ # This is where we end up for all variables that are paths
+ # to input data files.
+ literals = self._namelist.get_variable_value(group_name, variable_name)
+ for literal in literals:
+ file_path = character_literal_to_string(literal)
+ # NOTE - these are hard-coded here and a better way is to make these extensible
+ if file_path == 'UNSET' or file_path == 'idmap':
continue
+ if input_pathname == 'abs':
+ # No further mangling needed for absolute paths.
+ # At this point, there are overwrites that should be ignored
+ if not os.path.isabs(file_path):
+ continue
+ else:
+ pass
+ elif input_pathname.startswith('rel:'):
+ # The part past "rel" is the name of a variable that
+ # this variable specifies its path relative to.
+ root_var = input_pathname[4:]
+ root_dir = self.get_value(root_var)
+ file_path = os.path.join(root_dir, file_path)
+ else:
+ expect(False,
+ "Bad input_pathname value: {}.".format(input_pathname))
+ # Write to the input data list.
+ string = "{} = {}".format(variable_name, file_path)
+ hashValue = hashlib.md5(string.rstrip().encode('utf-8')).hexdigest()
+ if hashValue not in lines_hash:
+ logger.debug("Adding line {} with hash {}".format(string,hashValue))
+ input_data_list.write(string+"\n")
else:
- pass
- elif input_pathname.startswith('rel:'):
- # The part past "rel" is the name of a variable that
- # this variable specifies its path relative to.
- root_var = input_pathname[4:]
- root_dir = self.get_value(root_var)
- file_path = os.path.join(root_dir, file_path)
- else:
- expect(False,
- "Bad input_pathname value: {}.".format(input_pathname))
- # Write to the input data list.
- input_data_list.write("{} = {}\n".format(variable_name, file_path))
+ logger.debug("Line already in file {}".format(string))
def write_output_file(self, namelist_file, data_list_path=None, groups=None, sorted_groups=True):
"""Write out the namelists and input data files.
@@ -645,9 +672,7 @@ def write_output_file(self, namelist_file, data_list_path=None, groups=None, sor
self._namelist.write(namelist_file, groups=groups, sorted_groups=sorted_groups)
if data_list_path is not None:
- # append to input_data_list file
- with open(data_list_path, "a") as input_data_list:
- self._write_input_files(input_data_list)
+ self._write_input_files(data_list_path)
def add_nmlcontents(self, filename, group, append=True, format_="nmlcontents", sorted_groups=True):
""" Write only contents of nml group """
diff --git a/scripts/lib/CIME/utils.py b/scripts/lib/CIME/utils.py
index 2abb95f66..417cc0588 100644
--- a/scripts/lib/CIME/utils.py
+++ b/scripts/lib/CIME/utils.py
@@ -1352,11 +1352,11 @@ def does_file_have_string(filepath, text):
"""
return os.path.isfile(filepath) and text in open(filepath).read()
-
-def is_last_process_complete(filepath, expect_text, fail_text ):
+def is_last_process_complete(filepath, expect_text, fail_text):
"""
Search the filepath in reverse order looking for expect_text
- before finding fail_text.
+ before finding fail_text. This utility is used by archive_metadata.
+
"""
complete = False
fh = open(filepath, 'r')
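
The docstring addition documents the contract of is_last_process_complete:
scan the file from the end and report completion only if expect_text is
found before fail_text. A simplified reimplementation of that contract
(not the CIME source):

    def last_process_complete(lines, expect_text, fail_text):
        # walk backwards; whichever marker appears first decides the outcome
        for line in reversed(lines):
            if expect_text in line:
                return True
            if fail_text in line:
                return False
        return False

    assert last_process_complete(['ERROR', 'case.run success'],
                                 'success', 'ERROR')
    assert not last_process_complete(['case.run success', 'ERROR'],
                                     'success', 'ERROR')
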
diff --git a/src/components/data_comps/datm/cime_config/buildnml b/src/components/data_comps/datm/cime_config/buildnml
index 6ede9d4c3..99f44d6ff 100755
--- a/src/components/data_comps/datm/cime_config/buildnml
+++ b/src/components/data_comps/datm/cime_config/buildnml
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -65,14 +65,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DATM presaero mode is {}".format(datm_presaero))
logger.debug("DATM topo mode is {}".format(datm_topo))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "datm.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -204,6 +196,14 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file, files=files)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "datm.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -234,7 +234,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/desp/cime_config/buildnml b/src/components/data_comps/desp/cime_config/buildnml
index c9c6b8dd4..96cdfee64 100755
--- a/src/components/data_comps/desp/cime_config/buildnml
+++ b/src/components/data_comps/desp/cime_config/buildnml
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, infile, nmlgen):
+def _create_namelists(case, confdir, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -48,14 +48,6 @@ def _create_namelists(case, confdir, infile, nmlgen):
#----------------------------------------------------
logger.debug("DESP mode is %s", desp_mode)
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "desp.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -116,6 +108,13 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "desp.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -146,7 +145,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/dice/cime_config/buildnml b/src/components/data_comps/dice/cime_config/buildnml
index b8378afe5..fe3ada041 100755
--- a/src/components/data_comps/dice/cime_config/buildnml
+++ b/src/components/data_comps/dice/cime_config/buildnml
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -58,14 +58,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DICE mode is {}".format(dice_mode))
logger.debug("DICE grid is {}".format(ice_grid))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "dice.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -162,6 +154,14 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file, files=files)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "dice.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -192,7 +192,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/dlnd/cime_config/buildnml b/src/components/data_comps/dlnd/cime_config/buildnml
index c28c12d21..d1024a83c 100755
--- a/src/components/data_comps/dlnd/cime_config/buildnml
+++ b/src/components/data_comps/dlnd/cime_config/buildnml
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -57,14 +57,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DLND grid is {}".format(lnd_grid))
logger.debug("DLND glc_nec is {}".format(glc_nec))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "dlnd.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -170,6 +162,14 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file, files=files)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "dlnd.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -200,7 +200,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/docn/cime_config/buildnml b/src/components/data_comps/docn/cime_config/buildnml
index 438d3dd75..c3d75cca9 100755
--- a/src/components/data_comps/docn/cime_config/buildnml
+++ b/src/components/data_comps/docn/cime_config/buildnml
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -58,14 +58,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DOCN mode is {}".format(docn_mode))
logger.debug("DOCN grid is {}".format(ocn_grid))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "docn.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -175,6 +167,14 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file, files=files)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "docn.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -205,7 +205,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/drof/cime_config/buildnml b/src/components/data_comps/drof/cime_config/buildnml
index 49e547e70..1c83dd06c 100755
--- a/src/components/data_comps/drof/cime_config/buildnml
+++ b/src/components/data_comps/drof/cime_config/buildnml
@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -57,14 +57,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DROF mode is {}".format(drof_mode))
logger.debug("DROF grid is {}".format(rof_grid))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf",
- "drof.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -170,6 +162,14 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf",
+ "drof.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -200,7 +200,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
diff --git a/src/components/data_comps/dwav/cime_config/buildnml b/src/components/data_comps/dwav/cime_config/buildnml
index 70d2c4634..d268faee0 100755
--- a/src/components/data_comps/dwav/cime_config/buildnml
+++ b/src/components/data_comps/dwav/cime_config/buildnml
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
# pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
####################################################################################
-def _create_namelists(case, confdir, inst_string, infile, nmlgen):
+def _create_namelists(case, confdir, inst_string, infile, nmlgen, data_list_path):
####################################################################################
"""Write out the namelist for this component.
@@ -56,13 +56,6 @@ def _create_namelists(case, confdir, inst_string, infile, nmlgen):
logger.debug("DWAV mode is {}".format(dwav_mode))
logger.debug("DWAV grid is {}".format(wav_grid))
- #----------------------------------------------------
- # Clear out old data.
- #----------------------------------------------------
- data_list_path = os.path.join(case.get_case_root(), "Buildconf", "dwav.input_data_list")
- if os.path.exists(data_list_path):
- os.remove(data_list_path)
-
#----------------------------------------------------
# Create configuration information.
#----------------------------------------------------
@@ -161,6 +154,13 @@ def buildnml(case, caseroot, compname):
# Create the namelist generator object - independent of instance
nmlgen = NamelistGenerator(case, definition_file)
+ #----------------------------------------------------
+ # Clear out old data.
+ #----------------------------------------------------
+ data_list_path = os.path.join(case.get_case_root(), "Buildconf", "dwav.input_data_list")
+ if os.path.exists(data_list_path):
+ os.remove(data_list_path)
+
#----------------------------------------------------
# Loop over instances
#----------------------------------------------------
@@ -191,7 +191,7 @@ def buildnml(case, caseroot, compname):
namelist_infile = [infile]
# create namelist and stream file(s) data component
- _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen)
+ _create_namelists(case, confdir, inst_string, namelist_infile, nmlgen, data_list_path)
# copy namelist files and stream text files, to rundir
if os.path.isdir(rundir):
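
The same refactor lands in all seven data-component buildnml scripts
(datm, desp, dice, dlnd, docn, drof, dwav): the Buildconf/*.input_data_list
file is now cleared once in buildnml, before the instance loop, and its
path is passed down to _create_namelists. Previously the clear happened
inside _create_namelists, i.e. once per instance, so a multi-instance case
could be left with only the last instance's entries. Schematically
(hypothetical names and paths):

    import os

    def buildnml(caseroot, ninst):
        data_list_path = os.path.join(caseroot, 'Buildconf',
                                      'datm.input_data_list')
        # clear out old data once, not once per instance
        if os.path.exists(data_list_path):
            os.remove(data_list_path)
        for inst in range(1, ninst + 1):
            _create_namelists(inst, data_list_path)  # each instance appends

    def _create_namelists(inst, data_list_path):
        with open(data_list_path, 'a') as fh:
            fh.write('file{0} = /inputdata/stream_{0:04d}.nc\n'.format(inst))
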
diff --git a/src/drivers/mct/main/cime_comp_mod.F90 b/src/drivers/mct/main/cime_comp_mod.F90
index 63e877a01..5ae2192b6 100644
--- a/src/drivers/mct/main/cime_comp_mod.F90
+++ b/src/drivers/mct/main/cime_comp_mod.F90
@@ -198,7 +198,7 @@ module cime_comp_mod
type(mct_aVect) , pointer :: o2x_ox => null()
type(mct_aVect) , pointer :: a2x_ax => null()
- character(len=CL) :: suffix
+ character(len=CL) :: inst_suffix
logical :: iamin_id
character(len=seq_comm_namelen) :: compname
@@ -2160,7 +2160,6 @@ subroutine cime_run()
hashint = 0
-
call seq_infodata_putData(infodata,atm_phase=1,lnd_phase=1,ocn_phase=1,ice_phase=1)
call seq_timemgr_EClockGetData( EClock_d, stepno=begstep)
call seq_timemgr_EClockGetData( EClock_d, dtime=dtime)
@@ -3089,9 +3088,9 @@ subroutine cime_run()
if (do_hist_r2x) then
call t_drvstartf ('driver_rofpost_histaux', barrier=mpicom_CPLID)
do eri = 1,num_inst_rof
- suffix = component_get_suffix(rof(eri))
+ inst_suffix = component_get_suffix(rof(eri))
call seq_hist_writeaux(infodata, EClock_d, rof(eri), flow='c2x', &
- aname='r2x'//trim(suffix), dname='domrb', &
+ aname='r2x',dname='domrb',inst_suffix=trim(inst_suffix), &
nx=rof_nx, ny=rof_ny, nt=1, write_now=t24hr_alarm)
enddo
call t_drvstopf ('driver_rofpost_histaux')
@@ -3686,14 +3685,14 @@ subroutine cime_run()
if (do_hist_a2x) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x'//trim(suffix), dname='doma', &
+ aname='a2x',dname='doma', inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=ncpl)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x'//trim(suffix), dname='doma', &
+ aname='a2x',dname='doma', inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=ncpl, flds=hist_a2x_flds)
endif
enddo
@@ -3701,14 +3700,14 @@ subroutine cime_run()
if (do_hist_a2x1hri .and. t1hr_alarm) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x1hri_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1hi'//trim(suffix), dname='doma', &
+ aname='a2x1hi',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=24)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1hi'//trim(suffix), dname='doma', &
+ aname='a2x1hi',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=24, flds=hist_a2x1hri_flds)
endif
enddo
@@ -3716,14 +3715,14 @@ subroutine cime_run()
if (do_hist_a2x1hr) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x1hr_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1h'//trim(suffix), dname='doma', &
+ aname='a2x1h',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=24, write_now=t1hr_alarm)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1h'//trim(suffix), dname='doma', &
+ aname='a2x1h',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=24, write_now=t1hr_alarm, flds=hist_a2x1hr_flds)
endif
enddo
@@ -3731,14 +3730,14 @@ subroutine cime_run()
if (do_hist_a2x3hr) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x3hr_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x3h'//trim(suffix), dname='doma', &
+ aname='a2x3h',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=8, write_now=t3hr_alarm)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x3h'//trim(suffix), dname='doma', &
+ aname='a2x3h',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=8, write_now=t3hr_alarm, flds=hist_a2x3hr_flds)
endif
enddo
@@ -3746,14 +3745,14 @@ subroutine cime_run()
if (do_hist_a2x3hrp) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x3hrp_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x3h_prec'//trim(suffix), dname='doma', &
+ aname='a2x3h_prec',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=8, write_now=t3hr_alarm)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x3h_prec'//trim(suffix), dname='doma', &
+ aname='a2x3h_prec',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=8, write_now=t3hr_alarm, flds=hist_a2x3hrp_flds)
endif
enddo
@@ -3761,14 +3760,14 @@ subroutine cime_run()
if (do_hist_a2x24hr) then
do eai = 1,num_inst_atm
- suffix = component_get_suffix(atm(eai))
+ inst_suffix = component_get_suffix(atm(eai))
if (trim(hist_a2x24hr_flds) == 'all') then
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1d'//trim(suffix), dname='doma', &
+ aname='a2x1d',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=1, write_now=t24hr_alarm)
else
call seq_hist_writeaux(infodata, EClock_d, atm(eai), flow='c2x', &
- aname='a2x1d'//trim(suffix), dname='doma', &
+ aname='a2x1d',dname='doma',inst_suffix=trim(inst_suffix), &
nx=atm_nx, ny=atm_ny, nt=1, write_now=t24hr_alarm, flds=hist_a2x24hr_flds)
endif
enddo
@@ -3818,11 +3817,11 @@ subroutine cime_run()
rdays_offset = tbnds1_offset, &
years_offset = -1)
do eli = 1,num_inst_lnd
- suffix = component_get_suffix(lnd(eli))
+ inst_suffix = component_get_suffix(lnd(eli))
! Use yr_offset=-1 so the file with fields from year 1 has time stamp
! 0001-01-01 rather than 0002-01-01, etc.
call seq_hist_writeaux(infodata, EClock_d, lnd(eli), flow='c2x', &
- aname='l2x1yr_glc'//trim(suffix), dname='doml', &
+ aname='l2x1yr_glc',dname='doml',inst_suffix=trim(inst_suffix), &
nx=lnd_nx, ny=lnd_ny, nt=1, write_now=.true., &
tbnds1_offset = tbnds1_offset, yr_offset=-1, &
av_to_write=prep_glc_get_l2gacc_lx_one_instance(eli))
@@ -3832,9 +3831,9 @@ subroutine cime_run()
if (do_hist_l2x) then
do eli = 1,num_inst_lnd
- suffix = component_get_suffix(lnd(eli))
+ inst_suffix = component_get_suffix(lnd(eli))
call seq_hist_writeaux(infodata, EClock_d, lnd(eli), flow='c2x', &
- aname='l2x'//trim(suffix), dname='doml', &
+ aname='l2x',dname='doml',inst_suffix=trim(inst_suffix), &
nx=lnd_nx, ny=lnd_ny, nt=ncpl)
enddo
endif
diff --git a/src/drivers/mct/main/seq_hist_mod.F90 b/src/drivers/mct/main/seq_hist_mod.F90
index 480f30ecd..313598703 100644
--- a/src/drivers/mct/main/seq_hist_mod.F90
+++ b/src/drivers/mct/main/seq_hist_mod.F90
@@ -411,15 +411,15 @@ subroutine seq_hist_writeavg(infodata, EClock_d, &
real(r8) :: curr_time ! Time interval since reference time
real(r8) :: prev_time ! Time interval since reference time
real(r8) :: avg_time ! Average time of tavg
- integer(IN) :: yy, mm, dd ! year, month, day
+ integer(IN) :: yy, mm, dd ! year, month, day
integer(IN) :: fk ! index
character(CL) :: time_units ! units of time variable
character(CL) :: calendar ! calendar type
integer(IN) :: lsize ! local size of an aVect
character(CL) :: case_name ! case name
character(CL) :: hist_file ! Local path to history filename
- logical :: whead, wdata ! flags write header vs. data
- integer(IN) :: iidx ! component instance counter
+ logical :: whead, wdata ! flags write header vs. data
+ integer(IN) :: iidx ! component instance counter
type(mct_aVect), save :: a2x_ax_avg(num_inst_atm) ! tavg aVect/bundle
type(mct_aVect), save :: x2a_ax_avg(num_inst_atm)
@@ -974,23 +974,24 @@ end subroutine seq_hist_writeavg
!===============================================================================
- subroutine seq_hist_writeaux(infodata, EClock_d, comp, flow, aname, dname, &
+ subroutine seq_hist_writeaux(infodata, EClock_d, comp, flow, aname, dname, inst_suffix, &
nx, ny, nt, write_now, flds, tbnds1_offset, yr_offset, av_to_write)
implicit none
!--- arguments ---
type (seq_infodata_type) , intent(inout) :: infodata
- type(ESMF_Clock) , intent(in) :: EClock_d ! driver clock
- type(component_type) , intent(in) :: comp ! component instance
- character(len=3) , intent(in) :: flow ! 'x2c' or 'c2x'
- character(*) , intent(in) :: aname ! avect name for hist file
- character(*) , intent(in) :: dname ! domain name for hist file
- integer(IN) , intent(in) :: nx ! 2d global size nx
- integer(IN) , intent(in) :: ny ! 2d global size ny
- integer(IN) , intent(in) :: nt ! number of time samples per file
- logical , optional, intent(in) :: write_now ! write a sample now, if not used, write every call
- character(*) , optional, intent(in) :: flds ! list of fields to write
+ type(ESMF_Clock) , intent(in) :: EClock_d ! driver clock
+ type(component_type) , intent(in) :: comp ! component instance
+ character(len=3) , intent(in) :: flow ! 'x2c' or 'c2x'
+ character(*) , intent(in) :: aname ! avect name for hist file
+ character(*) , intent(in) :: dname ! domain name for hist file
+ character(*) , intent(in) :: inst_suffix ! instance number part of file name
+ integer(IN) , intent(in) :: nx ! 2d global size nx
+ integer(IN) , intent(in) :: ny ! 2d global size ny
+ integer(IN) , intent(in) :: nt ! number of time samples per file
+ logical , optional, intent(in) :: write_now ! write a sample now, if not used, write every call
+ character(*) , optional, intent(in) :: flds ! list of fields to write
! Offset for starting time bound, in fractional days. This should be negative. If
! tbnds1_offset is provided, then: When it's time to write the file, create the lower
@@ -1041,7 +1042,7 @@ subroutine seq_hist_writeaux(infodata, EClock_d, comp, flow, aname, dname, &
logical :: lwrite_now
logical :: whead, wdata ! for writing restart/history cdf files
real(r8) :: tbnds(2)
- character(len=12) :: date_str
+ character(len=16) :: date_str
integer(IN), parameter :: maxout = 20
integer(IN) , save :: ntout = 0
@@ -1163,9 +1164,9 @@ subroutine seq_hist_writeaux(infodata, EClock_d, comp, flow, aname, dname, &
if (present(yr_offset)) then
yy = yy + yr_offset
end if
- call shr_cal_ymdtod2string(date_str, yy, mm, dd)
- write(hist_file(found), "(6a)") &
- trim(case_name),'.cpl.h',trim(aname),'.',trim(date_str), '.nc'
+ call shr_cal_ymdtod2string(date_str, yy, mm, dd, curr_tod)
+ write(hist_file(found), "(8a)") &
+ trim(case_name),'.cpl',trim(inst_suffix),'.h',trim(aname),'.',trim(date_str), '.nc'
else
fk1 = 2
endif
@@ -1282,19 +1283,20 @@ end subroutine seq_hist_writeaux
!===============================================================================
- subroutine seq_hist_spewav(infodata, aname, gsmap, av, nx, ny, nt, write_now, flds)
+ subroutine seq_hist_spewav(infodata, aname, inst_suffix, gsmap, av, nx, ny, nt, write_now, flds)
implicit none
type(seq_infodata_type) , intent(in) :: infodata
- character(*) , intent(in) :: aname ! avect name for hist file
- type(mct_gsmap) , intent(in) :: gsmap ! gsmap
- type(mct_aVect) , intent(in) :: av ! avect
- integer(IN) , intent(in) :: nx ! 2d global size nx
- integer(IN) , intent(in) :: ny ! 2d global size ny
- integer(IN) , intent(in) :: nt ! number of time samples per file
- logical , intent(in), optional :: write_now ! write a sample now, if not used, write every call
- character(*) , intent(in), optional :: flds ! list of fields to write
+ character(*) , intent(in) :: aname ! avect name for hist file
+ character(*) , intent(in) :: inst_suffix ! instance number part of file name
+ type(mct_gsmap) , intent(in) :: gsmap ! gsmap
+ type(mct_aVect) , intent(in) :: av ! avect
+ integer(IN) , intent(in) :: nx ! 2d global size nx
+ integer(IN) , intent(in) :: ny ! 2d global size ny
+ integer(IN) , intent(in) :: nt ! number of time samples per file
+ logical , intent(in), optional :: write_now ! write a sample now, if not used, write every call
+ character(*) , intent(in), optional :: flds ! list of fields to write
!--- local ---
character(CL) :: case_name ! case name
@@ -1398,7 +1400,7 @@ subroutine seq_hist_spewav(infodata, aname, gsmap, av, nx, ny, nt, write_now, fl
fk1 = 1
call seq_infodata_GetData( infodata, case_name=case_name)
write(hist_file(found), "(a, i4.4, a)") &
- trim(case_name)//'.cpl.h'//trim(aname)//'.', nfiles(found), '.nc'
+ trim(case_name)//'.cpl'//trim(inst_suffix)//'.h'//trim(aname)//'.', nfiles(found), '.nc'
else
fk1 = 2
endif
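
On the Fortran side, the instance suffix moves out of the aVect name
(formerly aname='a2x3h'//trim(suffix)) and into a dedicated inst_suffix
argument of seq_hist_writeaux and seq_hist_spewav, so it is spliced into
the coupler history filename itself; date_str is also widened from 12 to
16 characters to carry the time of day. The resulting auxiliary-history
filename, sketched in Python for brevity (sample values are hypothetical):

    case_name, inst_suffix, aname = 'b.e21.B1850.f09_g17', '_0001', 'a2x3h'
    date_str = '0001-02-01-00000'  # from shr_cal_ymdtod2string(yy, mm, dd, tod)
    hist_file = '{0}.cpl{1}.h{2}.{3}.nc'.format(case_name, inst_suffix,
                                                aname, date_str)
    # -> b.e21.B1850.f09_g17.cpl_0001.ha2x3h.0001-02-01-00000.nc
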