Skip to content

Commit

Permalink
Merge pull request #1741 from GEOS-ESM/develop
Browse files Browse the repository at this point in the history
GitFlow: Merge Develop into Main for 2.28 Release
  • Loading branch information
mathomp4 authored Oct 17, 2022
2 parents 672097f + 37c705e commit 6d6869a
Show file tree
Hide file tree
Showing 24 changed files with 756 additions and 478 deletions.
55 changes: 55 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,24 @@
version: 2.1

parameters:
GHA_Actor:
type: string
default: ""
GHA_Action:
type: string
default: ""
GHA_Event:
type: string
default: ""
GHA_Meta:
type: string
default: ""

# Anchors to prevent forgetting to update a version
os_version: &os_version ubuntu20
baselibs_version: &baselibs_version v7.5.0
bcs_version: &bcs_version v10.23.0
tag_build_arg_name: &tag_build_arg_name maplversion

orbs:
ci: geos-esm/circleci-tools@1
Expand Down Expand Up @@ -150,3 +166,42 @@ workflows:
requires:
- build-and-test-MAPL-on-<< matrix.compiler >>-using-Unix Makefiles
baselibs_version: *baselibs_version

build-and-publish-docker:
when:
equal: [ "release", << pipeline.parameters.GHA_Event >> ]
jobs:
- ci/publish-docker:
filters:
tags:
only: /^v.*$/
name: publish-intel-docker-image
context:
- docker-hub-creds
- ghcr-creds
os_version: *os_version
baselibs_version: *baselibs_version
container_name: mapl
mpi_name: intelmpi
mpi_version: 2021.6.0
compiler_name: intel
compiler_version: 2022.1.0
image_name: geos-env
tag_build_arg_name: *tag_build_arg_name
- ci/publish-docker:
filters:
tags:
only: /^v.*$/
name: publish-gcc-docker-image
context:
- docker-hub-creds
- ghcr-creds
os_version: *os_version
baselibs_version: *baselibs_version
container_name: mapl
mpi_name: openmpi
mpi_version: 4.1.4
compiler_name: gcc
compiler_version: 12.1.0
image_name: geos-env-mkl
tag_build_arg_name: *tag_build_arg_name
24 changes: 24 additions & 0 deletions .docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# MAPL Dockerfile
# Builds and installs MAPL from a tagged source checkout on top of a
# prebuilt GMAO base image, then removes the source tree to keep the
# image small.

# Build args that select the base image variant (OS, MPI, compiler stack).
# NOTE: these ARGs are declared before FROM so they can be used in the
# BASE_IMAGE expansion; they are not visible after FROM unless redeclared.
ARG osversion
ARG imagename
ARG baselibversion
ARG mpiname
ARG mpiversion
ARG compilername
ARG compilerversion

# Compose the base image name, e.g.
#   gmao/ubuntu20-geos-env:v7.5.0-intelmpi_2021.6.0-intel_2022.1.0
ARG BASE_IMAGE=gmao/${osversion}-${imagename}:${baselibversion}-${mpiname}_${mpiversion}-${compilername}_${compilerversion}
FROM ${BASE_IMAGE}

# MAPL git tag/branch to build (passed by CI as the tag_build_arg)
ARG maplversion

# Clone MAPL at the requested tag, fetch sub-repositories with mepo,
# configure with CMake against the base image's baselibs ($BASEDIR) and
# compilers ($FC/$CC/$CXX), install stripped binaries, then delete the
# source tree in the same layer so it does not bloat the image.
RUN git clone -b ${maplversion} https://github.com/GEOS-ESM/MAPL.git /MAPL-src && \
cd /MAPL-src && \
mepo clone && \
mkdir build && \
cd build && \
cmake .. -DCMAKE_INSTALL_PREFIX=/MAPL/install -DBASEDIR=$BASEDIR/Linux -DUSE_F2PY=OFF -DCMAKE_Fortran_COMPILER=$FC -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX && \
make -j install/strip && \
cd / && \
rm -rf /MAPL-src
12 changes: 12 additions & 0 deletions .github/workflows/trigger-circleci-pipeline-on-release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Trigger a CircleCI pipeline whenever a GitHub release is published.
# CircleCI receives the event via pipeline parameters (GHA_Event etc.)
# declared in .circleci/config.yml.
on:
  release:
    types: [published]
jobs:
  trigger-circleci:
    runs-on: ubuntu-latest
    steps:
      - name: CircleCI Trigger on Release
        id: docker-build
        # NOTE(review): the action reference was garbled by the page scrape
        # ("[email protected]"); reconstructed as the CircleCI-published trigger
        # action — confirm the exact pinned version against the repository.
        uses: CircleCI-Public/trigger-circleci-pipeline-action@v1.0.5
        env:
          # Personal API token for the CircleCI API, stored as a repo secret
          CCI_TOKEN: ${{ secrets.CCI_TOKEN }}
141 changes: 89 additions & 52 deletions Apps/MAPL_GridCompSpecs_ACG.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,11 @@
import sys
import os
import csv
import warnings

SUCCESS = 0

CATEGORIES = ("IMPORT","EXPORT","INTERNAL")

###############################################################
class MAPL_DataSpec:
Expand All @@ -15,14 +19,18 @@ class MAPL_DataSpec:
'refresh_interval', 'averaging_interval', 'halowidth',
'precision','default','restart', 'ungridded_dims',
'field_type', 'staggering', 'rotation',
'friendlyto', 'add2export']
'friendlyto', 'add2export', 'datatype',
'attr_inames', 'att_rnames', 'attr_ivalues', 'attr_rvalues',
'ungridded_name', 'ungridded_unit', 'ungridded_coords']

# The following arguments are skipped if value is empty string
optional_options = [ 'dims', 'vlocation', 'num_subtiles',
'refresh_interval', 'averaging_interval', 'halowidth',
'precision','default','restart', 'ungridded_dims',
'field_type', 'staggering', 'rotation',
'friendlyto', 'add2export']
'friendlyto', 'add2export', 'datatype',
'attr_inames', 'att_rnames', 'attr_ivalues', 'attr_rvalues',
'ungridded_name', 'ungridded_unit', 'ungridded_coords']

entry_aliases = {'dims': {'z' : 'MAPL_DimsVertOnly',
'xy' : 'MAPL_DimsHorzOnly',
Expand All @@ -45,6 +53,9 @@ class MAPL_DataSpec:
# The following arguments must be placed within array brackets.
arraylike_options = ['ungridded_dims']

ALLOC = 'alloc'
DELIMITER = ', '
TERMINATOR = '_RC)'

def __init__(self, category, args, indent=3):
self.category = category
Expand All @@ -68,7 +79,10 @@ def get_rank(self):
if ungridded:
extra_dims = ungridded.strip('][').split(',')
extra_rank = len(extra_dims)
dims = MAPL_DataSpec.entry_aliases['dims'][self.args['dims']]
aliases = MAPL_DataSpec.entry_aliases['dims']
dims = self.args['dims']
if dims in aliases:
dims = aliases[dims]
return ranks[dims] + extra_rank

@staticmethod
Expand Down Expand Up @@ -98,12 +112,25 @@ def emit_declare_pointers(self):
return text

def emit_get_pointers(self):
text = self.emit_header()
short_name = MAPL_DataSpec.internal_name(self.args['short_name'])
mangled_name = MAPL_DataSpec.mangled_name(self.args['short_name'])
text = text + "call MAPL_GetPointer(" + self.category + ', ' + short_name + ", " + mangled_name + ", rc=status); VERIFY_(status)"
text = text + self.emit_trailer(nullify=True)
return text
""" Generate MAPL_GetPointer calls for the MAPL_DataSpec (self) """
""" Creates string by joining list of generated and literal strings """
""" including if block (emit_header) and 'alloc = value' (emit_pointer_alloc """
return MAPL_DataSpec.DELIMITER.join(
[ self.emit_header() + "call MAPL_GetPointer(" + self.category,
MAPL_DataSpec.internal_name(self.args['short_name']),
MAPL_DataSpec.mangled_name(self.args['short_name']) ] +
self.emit_pointer_alloc() +
[ MAPL_DataSpec.TERMINATOR + self.emit_trailer(nullify=True) ] )

def emit_pointer_alloc(self):
    """Build the optional ``alloc=...`` argument for MAPL_GetPointer.

    Returns a single-element list ``['alloc=<.true.|.false.>']`` when the
    spec has a non-blank 'alloc' entry, otherwise an empty list, so the
    caller can splice it into a joined argument list unconditionally.
    """
    EMPTY_LIST = []
    key = MAPL_DataSpec.ALLOC  # column/keyword name: 'alloc'
    if key in self.args:
        value = self.args[key].strip().lower()
        # Blank cell means "option not supplied" rather than false.
        listout = [ key + '=' + get_fortran_logical(value) ] if len(value) > 0 else EMPTY_LIST
    else:
        listout = EMPTY_LIST
    return listout

def emit_header(self):
text = self.newline()
Expand All @@ -117,9 +144,8 @@ def emit_args(self):
text = "call MAPL_Add" + self.category.capitalize() + "Spec(gc," + self.continue_line()
for option in MAPL_DataSpec.all_options:
text = text + self.emit_arg(option)
text = text + 'rc=status)' + self.newline()
text = text + MAPL_DataSpec.TERMINATOR + self.newline()
self.indent = self.indent - 5
text = text + 'VERIFY_(status)'
return text

def emit_arg(self, option):
Expand All @@ -143,7 +169,7 @@ def emit_arg(self, option):
value = MAPL_DataSpec.entry_aliases[option][self.args[option]]
else:
value = self.args[option]
text = text + value + ", " + self.continue_line()
text = text + value + MAPL_DataSpec.DELIMITER + self.continue_line()
return text

def emit_trailer(self, nullify=False):
Expand All @@ -160,9 +186,6 @@ def emit_trailer(self, nullify=False):
return text





def read_specs(specs_filename):

def csv_record_reader(csv_reader):
Expand All @@ -184,19 +207,22 @@ def dataframe(reader, columns):
df.append(dict(zip(columns, row)))
return df

# Python is case sensitive, so dict lookups are case sensitive.
# The column names are Fortran identifiers, which are case insensitive.
# So all lookups in the dict below should be converted to lowercase.
# Aliases must be lowercase.
column_aliases = {
    'name'      : 'short_name',
    'long name' : 'long_name',
    'vloc'      : 'vlocation',
    'ungridded' : 'ungridded_dims',
    'ungrid'    : 'ungridded_dims',
    'prec'      : 'precision',
    'cond'      : 'condition',
    'friend2'   : 'friendlyto',
    'addexp'    : 'add2export',
    # Fixed: key had a stray trailing space ('numsubs ') and the value a
    # stray trailing comma ('num_subtiles,'), so the alias never matched
    # a stripped header and would have emitted an invalid option name.
    'numsubs'   : 'num_subtiles',
    'avint'     : 'averaging_interval'
}

specs = {}
Expand All @@ -205,7 +231,6 @@ def dataframe(reader, columns):
gen = csv_record_reader(specs_reader)
schema_version = next(gen)[0].split(' ')[1]
component = next(gen)[0].split(' ')[1]
# print("Generating specification code for component: ",component)
while True:
try:
gen = csv_record_reader(specs_reader)
Expand All @@ -214,17 +239,33 @@ def dataframe(reader, columns):
bare_columns = [c.strip() for c in bare_columns]
columns = []
for c in bare_columns:
if c in column_aliases:
columns.append(column_aliases[c])
else:
columns.append(c)
columns.append(getifin(column_aliases, c))
specs[category] = dataframe(gen, columns)
except StopIteration:
break

return specs


def getifin(dictionary, key):
    """Return ``dictionary[key.lower()]`` if present, else ``key.lower()``.

    Maps a case-insensitive CSV column header to its canonical spec-option
    name; unaliased headers pass through lowercased. (The original
    docstring claimed the fallback was ``key``, but the code returned
    ``key.lower()`` — the docstring now matches the behavior.)
    """
    lowered = key.lower()
    return dictionary.get(lowered, lowered)

def get_fortran_logical(value_in):
    """Return a Fortran logical literal for a string representing a logical.

    Comparison ignores surrounding whitespace and case. Returns '.true.'
    or '.false.'; exits the program with an error message for ``None`` or
    an unrecognized value (matching the script's existing error style).
    """
    TRUE_VALUE = '.true.'
    FALSE_VALUE = '.false.'
    TRUE_VALUES = {TRUE_VALUE, 't', 'true', '.t.', 'yes', 'y', 'si', 'oui', 'sim'}
    # Fixed: 'no' appeared twice in the original set literal.
    FALSE_VALUES = {FALSE_VALUE, 'f', 'false', '.f.', 'no', 'n', 'non', 'nao'}
    if value_in is None:
        sys.exit("'None' is not valid for get_fortran_logical.")
    # Normalize once instead of per comparison.
    value = value_in.strip().lower()
    if value in TRUE_VALUES:
        return TRUE_VALUE
    if value in FALSE_VALUES:
        return FALSE_VALUE
    sys.exit("Unrecognized logical: " + value_in)

def header():
"""
Expand All @@ -249,8 +290,6 @@ def open_with_header(filename):
f.write(header())
return f



#############################################
# Main program begins here
#############################################
Expand All @@ -276,11 +315,11 @@ def open_with_header(filename):
help="override default output filename for get_pointer() code")
parser.add_argument("-d", "--declare-pointers", action="store", nargs='?',
const="{component}_DeclarePointer___.h", default=None,
help="override default output filename for AddSpec code")
help="override default output filename for pointer declaration code")
args = parser.parse_args()


# Process blocked CSV input file using pandas
# Process blocked CSV input file
specs = read_specs(args.input)

if args.name:
Expand All @@ -292,7 +331,7 @@ def open_with_header(filename):

# open all output files
f_specs = {}
for category in ("IMPORT","EXPORT","INTERNAL"):
for category in CATEGORIES:
option = args.__dict__[category.lower()+"_specs"]
if option:
fname = option.format(component=component)
Expand All @@ -309,16 +348,17 @@ def open_with_header(filename):
else:
f_get_pointers = None

# Generate code from specs (processed above with pandas)
for category in ("IMPORT","EXPORT","INTERNAL"):
for item in specs[category]:
spec = MAPL_DataSpec(category.lower(), item)
if f_specs[category]:
f_specs[category].write(spec.emit_specs())
if f_declare_pointers:
f_declare_pointers.write(spec.emit_declare_pointers())
if f_get_pointers:
f_get_pointers.write(spec.emit_get_pointers())
# Generate code from specs (processed above)
for category in CATEGORIES:
if category in specs:
for item in specs[category]:
spec = MAPL_DataSpec(category.lower(), item)
if f_specs[category]:
f_specs[category].write(spec.emit_specs())
if f_declare_pointers:
f_declare_pointers.write(spec.emit_declare_pointers())
if f_get_pointers:
f_get_pointers.write(spec.emit_get_pointers())

# Close output files
for category, f in list(f_specs.items()):
Expand All @@ -329,7 +369,4 @@ def open_with_header(filename):
if f_get_pointers:
f_get_pointers.close()





sys.exit(SUCCESS)
Loading

0 comments on commit 6d6869a

Please sign in to comment.