diff --git a/fre/__init__.py b/fre/__init__.py index d0903ee0..0e63b165 100644 --- a/fre/__init__.py +++ b/fre/__init__.py @@ -1,7 +1,6 @@ -from fre.fre import * from fre.frecheck import * from fre.frelist import * -from fre.fremake import * +from .make import * from fre.frepp import * from fre.frerun import * from fre.frecatalog import * diff --git a/fre/fre.py b/fre/fre.py index 0869feaa..87cfa0de 100644 --- a/fre/fre.py +++ b/fre/fre.py @@ -13,8 +13,7 @@ from fre import frecheck from fre.frecheck.frecheck import * -from fre import fremake -from fre.fremake.fremake import * +from .make import checkout_create, compile_create, makefile_create, dockerfile_create, fremake_run from fre import frepp from fre.frepp.frepp import * @@ -54,7 +53,7 @@ def freList(): pass @fre.group('make') -def freMake(): +def fremake(): """ - access fre make subcommands """ pass @@ -124,97 +123,187 @@ def mask_atmos_plevel(context, infile, outfile, psfile): """ fre make subcommands to be processed """ -@freMake.command() -@click.option('--uppercase', '-u', is_flag=True, help = 'Print statement in uppercase.') -def checkout(uppercase): - """ - Execute fre make checkout """ - statement = "execute fre make checkout script" - if uppercase: - statement = statement.upper() - click.echo(statement) - -@freMake.command() -@click.option("-y", - "--yamlfile", - type=str, - help="Experiment yaml compile FILE", + +@fremake.command() +@click.option("-y", + "--yamlfile", + type=str, + help="Experiment yaml compile FILE", required=True) # used click.option() instead of click.argument() because we want to have help statements -@click.option("-p", - "--platform", +@click.option("-p", + "--platform", multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() - type=str, + type=str, help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) -@click.option("-t", "--target", +@click.option("-t", "--target", multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() - type=str, - help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.", + type=str, help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). 
Any number of targets can be used.", required=True) -@click.option("-f", - "--force-checkout", - is_flag=True, - help="Force checkout to get a fresh checkout to source directory in case the source directory exists") -@click.option("-F", - "--force-compile", - is_flag=True, - help="Force compile to compile a fresh executable in case the executable directory exists") -@click.option("-K", - "--keep-compiled", - is_flag=True, - help="Keep compiled files in the executable directory for future use") -@click.option("--no-link", - is_flag=True, - help="Do not link the executable") -@click.option("-E", - "--execute", - is_flag=True, +@click.option("-e", + "--execute", + is_flag=True, help="Execute all the created scripts in the current session") -@click.option("-n", - "--parallel", +@click.option("-n", + "--parallel", type=int, - metavar='', - default=1, + metavar='', + default=1, help="Number of concurrent model compiles (default 1)") -@click.option("-j", - "--jobs", - type=int, +@click.option("-j", + "--jobs", + type=int, metavar='', - default=4, + default=4, help="Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS") -@click.option("-npc", - "--no-parallel-checkout", - is_flag=True, +@click.option("-npc", + "--no-parallel-checkout", + is_flag=True, help="Use this option if you do not want a parallel checkout. The default is to have parallel checkouts.") -@click.option("-s", - "--submit", - is_flag=True, +@click.option("-s", + "--submit", + is_flag=True, help="Submit all the created scripts as batch jobs") -@click.option("-v", - "--verbose", - is_flag=True, +@click.option("-v", + "--verbose", + is_flag=True, help="Get verbose messages (repeat the option to increase verbosity level)") -@click.option("-w", - "--walltime", +@click.pass_context +def run_fremake(context, yamlfile, platform, target, execute, parallel, jobs, no_parallel_checkout, submit, verbose): + """ - Perform all fremake functions to run checkout and compile model""" + context.forward(fremake_run) + +#### +@fremake.command() +@click.option("-y", + "--yamlfile", + type=str, + help="Experiment yaml compile FILE", + required=True) # used click.option() instead of click.argument() because we want to have help statements +@click.option("-p", + "--platform", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) +@click.option("-t", "--target", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.", + required=True) +@click.option("-j", + "--jobs", + type=int, + metavar='', + default=4, + help="Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS") +@click.option("-npc", + "--no-parallel-checkout", + is_flag=True, + help="Use this option if you do not want a parallel checkout. 
The default is to have parallel checkouts.") +@click.option("-e", + "--execute", + is_flag=True, + default=False, + help="Use this to run the created checkout script.") +@click.option("-v", + "--verbose", + is_flag=True, + help="Get verbose messages (repeat the option to increase verbosity level)") +@click.pass_context +def create_checkout(context,yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + """ - Write the checkout script """ + context.forward(checkout_create) + +##### +@fremake.command +@click.option("-y", + "--yamlfile", + type=str, + help="Experiment yaml compile FILE", + required=True) # used click.option() instead of click.argument() because we want to have help statements +@click.option("-p", + "--platform", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) +@click.option("-t", "--target", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.", + required=True) +@click.pass_context +def create_makefile(context,yamlfile,platform,target): + """ - Write the makefile """ + context.forward(makefile_create) + +##### + +@fremake.command +@click.option("-y", + "--yamlfile", + type=str, + help="Experiment yaml compile FILE", + required=True) # used click.option() instead of click.argument() because we want to have help statements +@click.option("-p", + "--platform", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) +@click.option("-t", "--target", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.", + required=True) +@click.option("-j", + "--jobs", + type=int, + metavar='', + default=4, + help="Number of jobs to run simultaneously. 
Used for make -jJOBS and git clone recursive --jobs=JOBS") +@click.option("-n", + "--parallel", type=int, - metavar='', - help="Maximum wall time NUM (in minutes) to use") -@click.option("--mail-list", - type=str, - help="Email the comma-separated STRING list of emails rather than $USER@noaa.gov") + metavar='', default=1, + help="Number of concurrent model compiles (default 1)") +@click.option("-e", + "--execute", + is_flag=True, + default=False, + help="Use this to run the created checkout script.") +@click.option("-v", + "--verbose", + is_flag=True, + help="Get verbose messages (repeat the option to increase verbosity level)") +@click.pass_context +def create_compile(context,yamlfile,platform,target,jobs,parallel,execute,verbose): + """ - Write the compile script """ + context.forward(compile_create) + +##### + +@fremake.command +@click.option("-y", + "--yamlfile", + type=str, + help="Experiment yaml compile FILE", + required=True) # used click.option() instead of click.argument() because we want to have help statements +@click.option("-p", + "--platform", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) +@click.option("-t", "--target", + multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() + type=str, + help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). 
Any number of targets can be used.", + required=True) +@click.option("-e", + "--execute", + is_flag=True, + help="Build Dockerfile that has been generated by create-docker.") @click.pass_context -def fremakefunction(context, yamlfile, platform, target, force_checkout, force_compile, keep_compiled, no_link, execute, parallel, jobs, no_parallel_checkout, submit, verbose, walltime, mail_list): - """ - Execute fre make func """ - context.forward(fremake.fremake.fremake) - -# # this is the command that will execute all of `fre make`, but I need to test whether it will be able to pass specific flags to different areas when it they each have different flags -# @freMake.command() -# @click.option('--uppercase', '-u', is_flag=True, help = 'Print statement in uppercase.') -# @click.pass_context -# def executeAll(context, uppercase): -# """ - Execute all commands under fre make""" -# context.forward(checkout) -# context.forward(compile) -# context.forward(container) -# context.forward(list) +def create_dockerfile(context,yamlfile,platform,target,execute): + """ - Write the dockerfile """ + context.forward(dockerfile_create) ############################################# @@ -376,6 +465,7 @@ def install(context, experiment, platform, target): type=str, help="YAML file to be used for parsing", required=True) + @click.pass_context def configure(context,yamlfile,experiment,platform,target): """ - Execute fre pp configure """ diff --git a/fre/fremake/__init__.py b/fre/fremake/__init__.py deleted file mode 100644 index a94ac6b1..00000000 --- a/fre/fremake/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from fre.fremake import * diff --git a/fre/fremake/fremake.py b/fre/fremake/fremake.py deleted file mode 100644 index 0ed40439..00000000 --- a/fre/fremake/fremake.py +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/python3 -## \date 2023 -## \author Tom Robinson -## \author Dana Singh -## \author Bennett Chang -## \description Script for fremake is used to create and run a code checkout script and compile a model. - -# import subprocess -# import os -# import yaml -# import argparse -# import logging -# import targetfre -# import varsfre -# import yamlfre -# import checkout -# import makefilefre -# import buildDocker -# import buildBaremetal -# from multiprocessing.dummy import Pool -import click - -@click.group() -def make(): - pass - -@make.command() -@click.option("-y", - "--yamlfile", - type=str, - help="Experiment yaml compile FILE", - required=True) # used click.option() instead of click.argument() because we want to have help statements -@click.option("-p", - "--platform", - multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() - type=str, - help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions", required=True) -@click.option("-t", "--target", - multiple=True, #replaces nargs=-1 since we are using click.option() instead of click.argument() - type=str, - help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments. Predefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). 
Any number of targets can be used.", - required=True) -@click.option("-f", - "--force-checkout", - is_flag=True, - help="Force checkout to get a fresh checkout to source directory in case the source directory exists") -@click.option("-F", - "--force-compile", - is_flag=True, - help="Force compile to compile a fresh executable in case the executable directory exists") -@click.option("-K", - "--keep-compiled", - is_flag=True, - help="Keep compiled files in the executable directory for future use") -@click.option("--no-link", - is_flag=True, - help="Do not link the executable") -@click.option("-E", - "--execute", - is_flag=True, - help="Execute all the created scripts in the current session") -@click.option("-n", - "--parallel", - type=int, - metavar='', - default=1, - help="Number of concurrent model compiles (default 1)") -@click.option("-j", - "--jobs", - type=int, - metavar='', - default=4, - help="Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS") -@click.option("-npc", - "--no-parallel-checkout", - is_flag=True, - help="Use this option if you do not want a parallel checkout. The default is to have parallel checkouts.") -@click.option("-s", - "--submit", - is_flag=True, - help="Submit all the created scripts as batch jobs") -@click.option("-v", - "--verbose", - is_flag=True, - help="Get verbose messages (repeat the option to increase verbosity level)") -@click.option("-w", - "--walltime", - type=int, - metavar='', - help="Maximum wall time NUM (in minutes) to use") -@click.option("--mail-list", - type=str, - help="Email the comma-separated STRING list of emails rather than $USER@noaa.gov") -def fremake(yamlfile, platform, target, force_checkout, force_compile, keep_compiled, no_link, execute, parallel, jobs, no_parallel_checkout, submit, verbose, walltime, mail_list): - """ - Fremake is used to create a code checkout script to compile models for FRE experiments. - """ - - # Insert Actual Code - - yml = yamlfile - ps = platform - ts = target - nparallel = parallel - jobs = str(jobs) - pcheck = no_parallel_checkout - - if pcheck: - pc = "" - else: - pc = " &" - - print("End of function") - print(yml) - print(ps) - print(ts) - -if __name__ == "__main__": - make() \ No newline at end of file diff --git a/fre/make/README-fremake.md b/fre/make/README-fremake.md new file mode 100644 index 00000000..22c8d757 --- /dev/null +++ b/fre/make/README-fremake.md @@ -0,0 +1,20 @@ +# **Fremake Canopy** + +Through the fre-cli, `fre make` can be used to create and run a code checkout script and compile a model. + +## **Usage (Users)** + +* Refer to fre-cli README.md for foundational fre-cli usage guide and tips. +* Fremake package repository located at: https://gitlab.gfdl.noaa.gov/portable_climate/fremake_canopy/-/tree/main + +### **Subtools Guide** + +1) **fre make** + - configure + - Postprocessing yaml configuration + - Minimal Syntax: `fre pp configure -y [user-edit yaml file]` + - Module(s) needed: n/a + - Example: `fre pp configure -y /home/$user/pp/ue2/user-edits/edits.yaml` + + +Currently, running fre make create checkout creates a 'test' directory and runs correctly if run with the '-e' flag on the initial call. However, if it has already been run with the '-e' flag, meaning that the testdir is already created, running it again with '-e' doesn't seem to run the checkout script.
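For orientation, the new `fre make` subcommands introduced in this change can be exercised directly from the CLI. The invocations below are an illustrative sketch only: the command names assume Click's default underscore-to-dash naming for the functions wired up in `fre/fre.py` (`run_fremake`, `create_checkout`, `create_makefile`, `create_compile`, `create_dockerfile`), and the yaml path and platform/target names are placeholders rather than files or platforms shipped with this change.

```shell
# Placeholder yaml, platform, and target values -- substitute your own.
fre make create-checkout   -y model.yaml -p my.platform -t prod -e
fre make create-makefile   -y model.yaml -p my.platform -t prod
fre make create-compile    -y model.yaml -p my.platform -t prod -j 4 -n 1 -e
fre make create-dockerfile -y model.yaml -p my.container.platform -t prod -e
# or do the checkout and compile in one pass:
fre make run-fremake       -y model.yaml -p my.platform -t prod
```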
diff --git a/fre/make/__init__.py b/fre/make/__init__.py new file mode 100644 index 00000000..ce6b7065 --- /dev/null +++ b/fre/make/__init__.py @@ -0,0 +1,12 @@ +from .createCheckout import checkout_create +from .createCompile import compile_create +from .createDocker import dockerfile_create +from .createMakefile import makefile_create +from .runFremake import fremake_run + +__all__ = ["checkout_create", + "compile_create", + "dockerfile_create", + "makefile_create", + "fremake_run"] + diff --git a/fre/make/createCheckout.py b/fre/make/createCheckout.py new file mode 100644 index 00000000..8ee2ec4c --- /dev/null +++ b/fre/make/createCheckout.py @@ -0,0 +1,90 @@ +#!/usr/bin/python3 + +from .gfdlfremake import varsfre, platformfre, yamlfre, checkout, targetfre +import click +import os +import logging +import sys + +@click.command() +def checkout_create(yamlfile,platform,target,no_parallel_checkout,jobs,execute,verbose): + # Define variables + yml = yamlfile + ps = platform + ts = target + run = execute + jobs = str(jobs) + pcheck = no_parallel_checkout + + if pcheck: + pc = "" + else: + pc = " &" + + if verbose: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.ERROR) + + srcDir="src" + checkoutScriptName = "checkout.sh" + baremetalRun = False # This is needed if there are no bare metal runs + + ## Split and store the platforms and targets in a list + plist = platform + tlist = target + + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + + ## Open the yaml file and parse as fremakeYaml + modelYaml = yamlfre.freyaml(yml,freVars) + fremakeYaml = modelYaml.getCompileYaml() + + ## Error checking the targets + for targetName in tlist: + target = targetfre.fretarget(targetName) + + ## Loop through the platforms specified on the command line + ## If the platform is a baremetal platform, write the checkout script and run it once + ## This should be done separately and serially because bare metal platforms should all be using + ## the same source code. 
+ for platformName in plist: + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + + ## Create the source directory for the platform + if iscontainer == False: + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + if not os.path.exists(srcDir): + os.system("mkdir -p " + srcDir) + #create checkout script: + #if checkout script exists, it is removed and created again + #if checkout script does not exist, it is created + freCheckout = checkout.checkout("checkout.sh",srcDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + # Run the checkout script + if run: + freCheckout.run() + else: + sys.exit() + + + click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + else: + ## Run the checkout script + image="ecpe4s/noaa-intel-prototype:2023.09.25" + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "tmp/"+platformName + freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + click.echo("\nCheckout script created at " + srcDir + "/checkout.sh" + "\n") + + +if __name__ == "__main__": + checkout_create() diff --git a/fre/make/createCompile.py b/fre/make/createCompile.py new file mode 100644 index 00000000..7faf979c --- /dev/null +++ b/fre/make/createCompile.py @@ -0,0 +1,86 @@ +#!/usr/bin/python3 + +from .gfdlfremake import varsfre, platformfre, yamlfre, targetfre, buildBaremetal +from multiprocessing.dummy import Pool +import logging +import os +import click +import sys + +@click.command() +def compile_create(yamlfile,platform,target,jobs,parallel,execute,verbose): + # Define variables + yml = yamlfile + ps = platform + ts = target + nparallel = parallel + jobs = str(jobs) + run = execute + + if verbose: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.ERROR) + + srcDir="src" + checkoutScriptName = "checkout.sh" + baremetalRun = False # This is needed if there are no bare metal runs + + ## Split and store the platforms and targets in a list + plist = platform + tlist = target + + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + + ## Open the yaml file and parse as fremakeYaml + modelYaml = yamlfre.freyaml(yml,freVars) + fremakeYaml = modelYaml.getCompileYaml() + + ## Error checking the targets + for targetName in tlist: + target = targetfre.fretarget(targetName) + + fremakeBuildList = [] + ## Loop through platforms and targets + for platformName in plist: + for targetName in tlist: + target = targetfre.fretarget(targetName) + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Make the bldDir based on the modelRoot, the platform, and the target + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + ## Check for type of build + if iscontainer == False: + baremetalRun = True + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" +
os.system("mkdir -p " + bldDir) + ## Create a list of compile scripts to run in parallel + fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], + mkTemplatePath = mkTemplate, + srcDir = srcDir, + bldDir = bldDir, + target = target, + modules = modules, + modulesInit = modulesInit, + jobs = jobs) + for c in fremakeYaml['src']: + fremakeBuild.writeBuildComponents(c) + fremakeBuild.writeScript() + fremakeBuildList.append(fremakeBuild) + click.echo("\nCompile script created at " + bldDir + "/compile.sh" + "\n") + if run: + #print("ITS GONNA RUN") + if baremetalRun: + pool = Pool(processes=nparallel) # Create a multiprocessing Pool + pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) # process data_inputs iterable with pool +# else: +# fremakeBuild.run() + else: + sys.exit() + +if __name__ == "__main__": + compile_create() diff --git a/fre/make/createDocker.py b/fre/make/createDocker.py new file mode 100644 index 00000000..94de8022 --- /dev/null +++ b/fre/make/createDocker.py @@ -0,0 +1,79 @@ +#!/usr/bin/python3 + +from .gfdlfremake import varsfre, targetfre, makefilefre, platformfre, yamlfre, buildDocker +import click +import os +import sys + +@click.command() +def dockerfile_create(yamlfile, platform, target, execute): + srcDir="src" + checkoutScriptName = "checkout.sh" + baremetalRun = False # This is needed if there are no bare metal runs + ## Split and store the platforms and targets in a list + plist = platform + tlist = target + yml = yamlfile + run = execute + + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml + modelYaml = yamlfre.freyaml(yml,freVars) + fremakeYaml = modelYaml.getCompileYaml() + + fremakeBuildList = [] + ## Loop through platforms and targets + for platformName in plist: + for targetName in tlist: + targetObject = targetfre.fretarget(targetName) + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + + ## Make the bldDir based on the modelRoot, the platform, and the target + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + ## Check for type of build + if iscontainer == True: + image="ecpe4s/noaa-intel-prototype:2023.09.25" + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "tmp/"+platformName + + freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], + libs = fremakeYaml["container_addlibs"], + srcDir = srcDir, + bldDir = bldDir, + mkTemplatePath = mkTemplate, + tmpDir = tmpDir) + + # Loop through components and send the component name and requires for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + + dockerBuild = buildDocker.container(base = image, + exp = fremakeYaml["experiment"], + libs = fremakeYaml["container_addlibs"], + RUNenv = RUNenv, + target = targetObject) + dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") + dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") + for c in fremakeYaml['src']: + dockerBuild.writeDockerfileMkmf(c) + + dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") + + currDir = os.getcwd() + 
click.echo("\ntmpDir created at " + currDir + "/tmp") + click.echo("Dockerfile created at " + currDir + "\n") + + if run: + dockerBuild.build(containerBuild, containerRun) + else: + sys.exit() + +if __name__ == "__main__": + dockerfile_create() diff --git a/fre/make/createMakefile.py b/fre/make/createMakefile.py new file mode 100644 index 00000000..b5335047 --- /dev/null +++ b/fre/make/createMakefile.py @@ -0,0 +1,71 @@ +#!/usr/bin/python3 + +from .gfdlfremake import makefilefre, varsfre, targetfre, yamlfre +import click +import os +import logging + +@click.command() +def makefile_create(yamlfile,platform,target): + srcDir="src" + checkoutScriptName = "checkout.sh" + baremetalRun = False # This is needed if there are no bare metal runs + ## Split and store the platforms and targets in a list + plist = platform + tlist = target + yml = yamlfile + + + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml + modelYaml = yamlfre.freyaml(yml,freVars) + fremakeYaml = modelYaml.getCompileYaml() + + fremakeBuildList = [] + ## Loop through platforms and targets + for platformName in plist: + for targetName in tlist: + targetObject = targetfre.fretarget(targetName) + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Make the bldDir based on the modelRoot, the platform, and the target + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + ## Check for type of build + if iscontainer == False: + baremetalRun = True + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + targetObject.gettargetName() + "/exec" + os.system("mkdir -p " + bldDir) + ## Create the Makefile + freMakefile = makefilefre.makefile(exp = fremakeYaml["experiment"], + libs = fremakeYaml["baremetal_linkerflags"], + srcDir = srcDir, + bldDir = bldDir, + mkTemplatePath = mkTemplate) + # Loop through components and send the component name, requires, and overrides for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + click.echo("\nMakefile created at " + bldDir + "/Makefile" + "\n") + else: + image="ecpe4s/noaa-intel-prototype:2023.09.25" + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "tmp/"+platformName + freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], + libs = fremakeYaml["container_addlibs"], + srcDir = srcDir, + bldDir = bldDir, + mkTemplatePath = mkTemplate, + tmpDir = tmpDir) + + # Loop through compenents and send the component name and requires for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + click.echo("\nMakefile created at " + bldDir + "/Makefile" + "\n") + +if __name__ == "__main__": + makefile_create() diff --git a/fre/make/gfdlfremake/__init__.py b/fre/make/gfdlfremake/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/fre/make/gfdlfremake/buildBaremetal.py b/fre/make/gfdlfremake/buildBaremetal.py new file mode 100644 index 00000000..d3be45f0 --- /dev/null +++ b/fre/make/gfdlfremake/buildBaremetal.py @@ -0,0 +1,97 @@ +#!/usr/bin/python3 +## \date 2023 +## \author Tom Robinson +## 
\email thomas.robinson@noaa.gov +## \description + +import subprocess +import os +from . import targetfre +## \brief Called for parallel execution purposes. Runs the builds. +## \param fremakeBuildList the fremakeBuild object list passes by pool.map +def fremake_parallel(fremakeBuildList): + fremakeBuildList.run() + +class buildBaremetal(): +## \brief Creates the build script to compile the model +## \param self The buildScript object +## \param exp The experiment name +## \param mkTemplatePath The template used by mkmf to compile the model +## \param srcDir The source directory +## \param bldDir The build directory +## \param modules The list of modules to load before compilation +## \param modulesInit A list of commands with new line characters to initialize modules + def __init__(self,exp,mkTemplatePath,srcDir,bldDir,target,modules,modulesInit,jobs): + self.e = exp + self.t = target.gettargetName() + self.src = srcDir + self.bld = bldDir + self.make = "make --jobs="+str(jobs)+" "+target.getmakeline_add() #make line + self.mkmf = True + self.template = mkTemplatePath + self.modules = "" + for m in modules: + self.modules = self.modules +" "+ m +## Set up the top portion of the compile script + self.setup=[ "#!/bin/sh -fx \n", + "bld_dir="+self.bld+"/ \n", + "src_dir="+self.src+"/ \n", + "mkmf_template="+self.template+" \n"] + if self.modules != "": + self.setup.extend(modulesInit) #extend because this is a list + self.setup.append("module load "+self.modules+" \n") # Append because this is a single string +## Create the build directory + os.system("mkdir -p "+self.bld) +## Create the compile script + self.f=open(self.bld+"/compile.sh","w") + self.f.writelines(self.setup) +## \brief Adds components to the build script +## \param self The build script object +## \param c Component from the compile yaml + def writeBuildComponents(self, c): +# Shorthand for component + comp = c["component"] +# Make the component directory + self.f.write("\n mkdir -p $bld_dir/"+comp+"\n") +# Get the paths needed for compiling + pstring = "" + for paths in c["paths"]: + pstring = pstring+"$src_dir/"+paths+" " +# Run list_paths + self.f.write(" list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+"\n") + self.f.write(" cd $bld_dir/"+comp+"\n") +# Create the mkmf line + if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: #Has requirements +#Set up the requirements as a string to inclue after the -o + reqstring = "" + for r in c["requires"]: + reqstring = reqstring+"-I$bld_dir/"+r+" " +#Figure out if we need the preprocessor + if c["doF90Cpp"]: + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: + self.f.write(" mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") 
+## Finishes and writes the build script +## \param self The buildScript object +##TODO: add targets input + def writeScript(self): + self.f.write("cd "+self.bld+"\n") + self.f.write(self.make+"\n") + self.f.close() +## Run the build script +## \param self The dockerfile object +## TODO run as a batch job on the login cluster + def run(self): +###### TODO make the Makefile + os.chmod(self.bld+"/compile.sh", 0o744) + command = [self.bld+"/compile.sh","|","tee",self.bld+"/log.compile"] + try: + subprocess.run(args=command, check=True) + except: + print("There was an error running "+self.bld+"/compile.sh") + raise + diff --git a/fre/make/gfdlfremake/buildDocker.py b/fre/make/gfdlfremake/buildDocker.py new file mode 100644 index 00000000..92f418b0 --- /dev/null +++ b/fre/make/gfdlfremake/buildDocker.py @@ -0,0 +1,153 @@ +#!/usr/bin/python3 +## \date 2023 +## \author Tom Robinson +## \email thomas.robinson@noaa.gov +## \description + +import os +from . import targetfre + +class container(): +## \brief Opens the Dockerfile for writing +## \param self The dockerfile object +## \param base The docker base image to start from +## \param libs Additional libraries defined by user +## \param exp The experiment name +## \param RUNenv The commands that have to be run at the beginning of a RUN in the dockerfile +## to set up the environment + def __init__(self,base,exp,libs,RUNenv,target): + self.base = base + self.e = exp + self.l = libs + self.src = "/apps/"+self.e+"/src" + self.bld = "/apps/"+self.e+"/exec" + self.mkmf = True + self.target = target + self.template = "/apps/mkmf/templates/hpcme-intel21.mk" + if RUNenv == "": + self.setup = ["RUN \\ \n"] + else: + self.setup = ["RUN "+RUNenv[0]+" \\ \n"] + self.setup + for env in RUNenv[1:]: + self.setup.append(" && "+env+" \\ \n") + if self.l: + for l in self.l: + self.setup.append(" && spack load "+l+" \\ \n") + self.mkmfclone=["RUN cd /apps \\ \n", + " && git clone --recursive https://github.com/NOAA-GFDL/mkmf \\ \n", + " && cp mkmf/bin/* /usr/local/bin \n"] + self.bldsetup=["RUN bld_dir="+self.bld+" \\ \n", + " && src_dir="+self.src+" \\ \n", + " && mkmf_template="+self.template+ " \\ \n"] + self.d=open("Dockerfile","w") + self.d.writelines("FROM "+self.base+" \n") +## \brief writes to the checkout part of the Dockerfile and sets up the compile +## \param self The dockerfile object +## \param cScriptName The name of the checkout script in the container +## \param cOnDisk The relative path to the checkout script on disk + def writeDockerfileCheckout(self, cScriptName, cOnDisk): + self.checkoutPath = "/apps/"+self.e+"/src/"+ cScriptName + self.d.write("COPY " + cOnDisk +" "+ self.checkoutPath +" \n") + self.d.write("RUN chmod 744 /apps/"+self.e+"/src/checkout.sh \n") + self.d.writelines(self.setup) + self.d.write(" && /apps/"+self.e+"/src/checkout.sh \n") +# Clone mkmf + self.d.writelines(self.mkmfclone) +## Copies the Makefile into the bldDir in the dockerfile +## \param self The dockerfile object +## \param makefileOnDiskPath The path to Makefile on the local disk +## \param linklineonDiskPath The path to the link line script on the local disk + def writeDockerfileMakefile(self, makefileOnDiskPath, linklineonDiskPath): + # Set up the bldDir + # If no additional libraries defined + if self.l == None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + "COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n"] + self.d.writelines(self.bldCreate) + # If additional libraries defined + if self.l != None: + self.bldCreate=["RUN mkdir -p "+self.bld+" \n", + 
"COPY "+ makefileOnDiskPath +" "+self.bld+"/Makefile \n", + "RUN chmod +rw "+self.bld+"/Makefile \n", + "COPY "+ linklineonDiskPath +" "+self.bld+"/linkline.sh \n", + "RUN chmod 744 "+self.bld+"/linkline.sh \n"] + self.d.writelines(self.bldCreate) + self.d.writelines(self.setup) + self.d.write(" && "+self.bld+"/linkline.sh \n") + +## \brief Adds components to the build part of the Dockerfile +## \param self The dockerfile object +## \param c Component from the compile yaml + def writeDockerfileMkmf(self, c): +# Set up the compile variables + self.d.writelines(self.bldsetup) +# Shorthand for component + comp = c["component"] +# Make the component directory + self.d.write(" && mkdir -p $bld_dir/"+comp+" \\ \n") +# Get the paths needed for compiling + pstring = "" + for paths in c["paths"]: + pstring = pstring+"$src_dir/"+paths+" " +# Run list_paths + self.d.write(" && list_paths -l -o $bld_dir/"+comp+"/pathnames_"+comp+" "+pstring+" \\ \n") + self.d.write(" && cd $bld_dir/"+comp+" \\ \n") +# Create the mkmf line + if c["requires"] == [] and c["doF90Cpp"]: # If this lib doesnt have any code dependencies and it requires the preprocessor (no -o and yes --use-cpp) + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + elif c["requires"] == []: # If this lib doesnt have any code dependencies (no -o) + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: #Has requirements +#Set up the requirements as a string to inclue after the -o + reqstring = "" + for r in c["requires"]: + reqstring = reqstring+"-I$bld_dir/"+r+" " +#Figure out if we need the preprocessor + if c["doF90Cpp"]: + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template --use-cpp -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + else: + self.d.write(" && mkmf -m Makefile -a $src_dir -b $bld_dir -p lib"+comp+".a -t $mkmf_template -c \""+c["cppdefs"]+"\" -o \""+reqstring+"\" "+c["otherFlags"]+" $bld_dir/"+comp+"/pathnames_"+comp+" \n") + +## \brief Writes a runscript to set up spack loads/environment in order to run the executable in the container; runscript copied into container +## \param self The dockerfile object +## \param RUNEnv The commands that have to be run at the beginning of a RUN in the dockerfile +## \param containerRun The container platform used with `exec` to run the container; apptainer or singularity used +## \param runOnDisk The path to the run script on the local disk + def writeRunscript(self,RUNenv,containerRun,runOnDisk): + #create runscript in tmp - create spack environment, install necessary packages, + self.createscript = ["#!/bin/bash \n", + "# Set up spack loads\n", + RUNenv[0]+"\n"] + with open(runOnDisk,"w") as f: + f.writelines(self.createscript) + f.write("# Load spack packages\n") + for env in RUNenv[1:]: + f.write(env+"\n") + + if self.l: + for l in self.l: + self.spackloads = "spack load "+l+"\n" + f.write(self.spackloads) + + f.write("# Run executable\n") + f.write(self.bld+"/"+self.e+".x\n") + #copy runscript into container in dockerfile + self.d.write("COPY "+runOnDisk+" "+self.bld+"/execrunscript.sh\n") + #make runscript executable + self.d.write("RUN chmod 744 "+self.bld+"/execrunscript.sh\n") + #finish the dockerfile + 
self.d.writelines(self.setup) + self.d.write(" && cd "+self.bld+" && make -j 4 "+self.target.getmakeline_add()+"\n") + self.d.write('ENTRYPOINT ["/bin/bash"]') + self.d.close() + +## Builds the container image for the model +## \param self The dockerfile object +## \param containerBuild The tool used to build the container; docker or podman used +## \param containerRun The container platform used with `exec` to run the container; apptainer or singularity used + def build(self,containerBuild,containerRun): + os.system(containerBuild+" build -f Dockerfile -t "+self.e+":"+self.target.gettargetName()) + os.system("rm -f "+self.e+".tar "+self.e+".sif") + os.system(containerBuild+" save -o "+self.e+"-"+self.target.gettargetName()+".tar localhost/"+self.e+":"+self.target.gettargetName()) + os.system(containerRun+" build --disable-cache "+self.e+"-"+self.target.gettargetName()+".sif docker-archive://"+self.e+"-"+self.target.gettargetName()+".tar") diff --git a/fre/make/gfdlfremake/checkout.py b/fre/make/gfdlfremake/checkout.py new file mode 100644 index 00000000..d1a1c7e3 --- /dev/null +++ b/fre/make/gfdlfremake/checkout.py @@ -0,0 +1,122 @@ +import os +import subprocess + +## TODO: Add parallelizations using () and simplify +## Creates the clone lines for the checkout script +## \param file Checkout script file +## \param repo the repo(s) to clone +## \param component Model component name +## \param srcDir The source directory +## \param branch The version to clone/checkout +## \param add Additional instrcutions after the clone +## \param multi True if a component has more than one repo to clone +def writeRepo(file,repo,component,srcDir,branch,add,multi,jobs,pc): +## Write message about cloning repo and branch in component + file.write("echo cloning "+repo+" -b "+branch+" into "+srcDir+"/"+component+"\n") +## If this component has multiple repos, clone everything in the component folder +## If it's not multi, then use the component name (comp) as the folder name to clone into + if multi: + file.write("mkdir -p "+component+"\n") + file.write("cd "+component+"\n") + comp="" + else: + comp=component + +## Check if there is a branch/version and then write the clone line; record the pid of that clone in dictionary `pids` if parallel checkout option is defined + if pc: + if branch=="": + file.write("(git clone --recursive --jobs="+jobs+" "+repo+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + file.write("(git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+")"+pc+"\n") + if multi: + r=repo.split("/")[4].strip(".git") + file.write("pids+=("+r+"pid:$!)\n") + else: + file.write("pids+=("+comp+"pid:$!)\n") + else: + if branch=="": + file.write("git clone --recursive --jobs="+jobs+" "+repo+" "+comp+"\n") + else: + file.write("git clone --recursive --jobs="+jobs+" "+repo+" -b "+branch+" "+comp+"\n") + +## Make sure to go back up in the folder structure + if multi: + file.write("cd .. 
\n") + if add!="": + file.write(add) + +## Class to create the checkout script +class checkout(): +## \brief Opens the checkout script with the specified name +## \param self The checkout script object +## \param fname The file name of the checkout script +## \param srcDir The source directory where fname will be run and source will exist + def __init__(self,fname,srcDir): + self.fname = fname + self.src = srcDir +##TODO: Force checkout + os.system("rm -rf "+self.src+"/*") + self.checkoutScript = open(self.src+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -f \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + +## \brief Writes the contents of the checkout script by looping through the input yaml +## \param self The checkout script object +## \param y The fremake compile yaml + def writeCheckout(self,y,jobs,pc): + self.checkoutScript.write("cd "+self.src +"\n") + for c in y['src']: + if type(c['repo']) is list and type(c['branch']) is list: + for (repo,branch) in zip(c['repo'],c['branch']): + writeRepo(self.checkoutScript,repo,c['component'],self.src,branch,c['additionalInstructions'],True,jobs,pc) + else: + writeRepo(self.checkoutScript,c['repo'],c['component'],self.src,c['branch'],c['additionalInstructions'],False,jobs,pc) +## Add additional instructions + self.checkoutScript.write(c['additionalInstructions']) +## \brief If pc is defined: Loops through dictionary of pids, waits for each pid individually, writes exit code in `check` list; allows checkoutscript to exit if exit code is not 0; closes the checkout script when writing is done +## \param self The checkout script object + def finish (self,pc): + if pc: + self.checkoutScript.write('for id in ${pids[@]}; do\n wait ${id##*:}\n check+=("clone of ${id%%:*} exited with status $?")\ndone\n') + self.checkoutScript.write('for stat in "${check[@]}"; do\n echo $stat \n if [ ${stat##* } -ne 0 ]; then\n exit ${stat##* }\n fi\ndone') + self.checkoutScript.close() + else: + self.checkoutScript.close() +## \brief Changes the permission on the checkout script and runs it +## \param self The checkout script object +## TODO: batch script building + def run (self): + os.chmod(self.src+"/"+self.fname, 0o744) + try: + subprocess.run(args=[self.src+"/"+self.fname], check=True) + except: + print("There was an error with the checkout script "+self.src+"/"+self.fname) + raise +################################################################################################### +## Subclass for container checkout +class checkoutForContainer(checkout): +## \brief Opens the checkout script with the specified name +## \param self The checkout script object +## \param fname The file name of the checkout script +## \param srcDir The source directory where fname will be run and source will exist +## \param tmpdir The relative path on disk that fname will be created (and copied from into the +## container) + def __init__(self,fname,srcDir,tmpdir): + self.fname = fname + self.src = srcDir + self.tmpdir = tmpdir + os.system("mkdir -p "+self.tmpdir) + os.system("rm -rf "+self.tmpdir+"/*") + self.checkoutScript = open(self.tmpdir+"/"+fname, 'w') + self.checkoutScript.write("#!/bin/sh -fx \n") + self.checkoutScript.write("export GIT_TERMINAL_PROMPT=0 \n") + +## \brief Removes the self.tmpdir and contents +## \param self The checkout script object + def cleanup (self): + os.system("rm -rf "+self.tmpdir) diff --git a/fre/make/gfdlfremake/fremake b/fre/make/gfdlfremake/fremake new file mode 100755 index 00000000..b0bc6385 --- /dev/null +++ 
b/fre/make/gfdlfremake/fremake @@ -0,0 +1,249 @@ +#!/usr/bin/python3 +## \date 2023 +## \author Tom Robinson +## \author Dana Singh +## \description fremake is used to create and run a code checkout script and compile a model. + +import subprocess +import os +import yaml +import argparse +import logging +from . import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal +from multiprocessing.dummy import Pool + +## Add in cli options +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Fremake is used to create a code checkout script to compile models for FRE experiments.') + parser.add_argument("-y", + "--yamlfile", + type=str, help="Experiment yaml compile FILE",required=True) + parser.add_argument("-p", + "--platform", + nargs='*', + type=str, help="Hardware and software FRE platform space separated list of STRING(s). This sets platform-specific data and instructions",required=True) + parser.add_argument("-t", + "--target", + nargs='*', + type=str, help="FRE target space separated list of STRING(s) that defines compilation settings and linkage directives for experiments.\n\nPredefined targets refer to groups of directives that exist in the mkmf template file (referenced in buildDocker.py). Possible predefined targets include 'prod', 'openmp', 'repro', 'debug, 'hdf5'; however 'prod', 'repro', and 'debug' are mutually exclusive (cannot not use more than one of these in the target list). Any number of targets can be used.",required=True) + parser.add_argument("-f", + "--force-checkout", + action="store_true", + help="Force checkout to get a fresh checkout to source directory in case the source directory exists") + parser.add_argument("-F", + "--force-compile", + action="store_true", + help="Force compile to compile a fresh executable in case the executable directory exists") + parser.add_argument("-K", + "--keep-compiled", + action="store_true", + help="Keep compiled files in the executable directory for future use") + parser.add_argument("--no-link", + action="store_true", + help="Do not link the executable") + parser.add_argument("-E", + "--execute", + action="store_true", + help="Execute all the created scripts in the current session") + parser.add_argument("-n", + "--parallel", + type=int, + metavar='', default=1, + help="Number of concurrent model compiles (default 1)") + parser.add_argument("-j", + "--jobs", + type=int, + metavar='', default=4, + help="Number of jobs to run simultaneously. Used for make -jJOBS and git clone recursive --jobs=JOBS") + parser.add_argument("-npc", + "--no-parallel-checkout", + action="store_true", + help="Use this option if you do not want a parallel checkout. 
The default is to have parallel checkouts.") + parser.add_argument("-s", + "--submit", + action="store_true", + help="Submit all the created scripts as batch jobs") + parser.add_argument("-v", + "--verbose", + action="store_true", + help="Get verbose messages (repeat the option to increase verbosity level)") + parser.add_argument("-w", + "--walltime", + type=int, metavar='', + help="Maximum wall time NUM (in minutes) to use") + parser.add_argument("--mail-list", + type=str, + help="Email the comma-separated STRING list of emails rather than $USER@noaa.gov") + + ## Parse the arguments + args = parser.parse_args() + + ## Define arguments as variables + yml = args.yamlfile + ps = args.platform + ts = args.target + nparallel = args.parallel + jobs = str(args.jobs) + pcheck = args.no_parallel_checkout + + ## Define parallelism addition for checkouts + # If pcheck is defined, no parallel checkouts + # If pcheck is not defined, default is to have parallel checkouts + if pcheck: + pc = "" + else: + pc = " &" + + ## Define operation of option(s) above + if args.verbose: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.ERROR) + +#### Main +srcDir="src" +checkoutScriptName = "checkout.sh" +baremetalRun = False # This is needed if there are no bare metal runs + +## Split and store the platforms and targets in a list +plist = args.platform +tlist = args.target + +## Get the variables in the model yaml +freVars = varsfre.frevars(yml) + +## Open the yaml file and parse as fremakeYaml +modelYaml = yamlfre.freyaml(yml,freVars) +fremakeYaml = modelYaml.getCompileYaml() + +## Error checking the targets +for targetName in tlist: + target = targetfre.fretarget(targetName) + +## Loop through the platforms specified on the command line +## If the platform is a baremetal platform, write the checkout script and run it once +## This should be done separately and serially because bare metal platforms should all be using +## the same source code. +for platformName in plist: + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + + ## Create the checkout script + if iscontainer == False: + ## Create the source directory for the platform + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + if not os.path.exists(srcDir): + os.system("mkdir -p " + srcDir) + if not os.path.exists(srcDir+"/checkout.sh"): + freCheckout = checkout.checkout("checkout.sh",srcDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + +## TODO: Options for running on login cluster?
+ freCheckout.run() + +fremakeBuildList = [] +## Loop through platforms and targets +for platformName in plist: + for targetName in tlist: + target = targetfre.fretarget(targetName) + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,containerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + + ## Make the source directory based on the modelRoot and platform + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + + ## Check for type of build + if iscontainer == False: + baremetalRun = True + ## Make the build directory based on the modelRoot, the platform, and the target + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" + os.system("mkdir -p " + bldDir) + + ## Create the Makefile + freMakefile = makefilefre.makefile(exp = fremakeYaml["experiment"], + libs = fremakeYaml["baremetal_linkerflags"], + srcDir = srcDir, + bldDir = bldDir, + mkTemplatePath = mkTemplate) + + + # Loop through components and send the component name, requires, and overrides for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + +## Create a list of compile scripts to run in parallel + fremakeBuild = buildBaremetal.buildBaremetal(exp = fremakeYaml["experiment"], + mkTemplatePath = mkTemplate, + srcDir = srcDir, + bldDir = bldDir, + target = target, + modules = modules, + modulesInit = modulesInit, + jobs = jobs) + + for c in fremakeYaml['src']: + fremakeBuild.writeBuildComponents(c) + fremakeBuild.writeScript() + fremakeBuildList.append(fremakeBuild) + ## Run the build + fremakeBuild.run() + else: +#################################### container stuff below ########################################################### + ## Run the checkout script +# image="hpc-me-intel:2021.1.1" + image="ecpe4s/noaa-intel-prototype:2023.09.25" + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "tmp/"+platformName + + ## Create the checkout script + freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + + ## Create the makefile +### Should this even be a separate class from "makefile" in makefilefre? 
~ ejs + freMakefile = makefilefre.makefileContainer(exp = fremakeYaml["experiment"], + libs = fremakeYaml["container_addlibs"], + srcDir = srcDir, + bldDir = bldDir, + mkTemplatePath = mkTemplate, + tmpDir = tmpDir) + + # Loop through components and send the component name and requires for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + + ## Build the dockerfile + dockerBuild = buildDocker.container(base = image, + exp = fremakeYaml["experiment"], + libs = fremakeYaml["container_addlibs"], + RUNenv = RUNenv, + target = target) + + dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") + dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile", freMakefile.getTmpDir()+"/linkline.sh") + + for c in fremakeYaml['src']: + dockerBuild.writeDockerfileMkmf(c) + + dockerBuild.writeRunscript(RUNenv,containerRun,tmpDir+"/execrunscript.sh") + + ## Run the dockerfile; build the container + dockerBuild.build(containerBuild,containerRun) + + #freCheckout.cleanup() + #buildDockerfile(fremakeYaml,image) + +if baremetalRun: + if __name__ == '__main__': + pool = Pool(processes=nparallel) # Create a multiprocessing Pool + pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) # process data_inputs iterable with pool diff --git a/fre/make/gfdlfremake/makefilefre.py b/fre/make/gfdlfremake/makefilefre.py new file mode 100644 index 00000000..8cc6b6ea --- /dev/null +++ b/fre/make/gfdlfremake/makefilefre.py @@ -0,0 +1,176 @@ +import os +import subprocess +import textwrap + +## \brief Writes the link line for bare metal and container builds +## \param self The Makefile object +def linklineBuild(self): + linkline="" +#if additional libraries are defined, populate the link line with the correct information for libraries +## CONTAINER; write a script that will execute in the container, to fill in link line with additional libraries in Makefile + if "tmp" in self.filePath: + with open(self.filePath+"/linkline.sh","a") as fh: + fh.write("set -- ") + for l in self.l: + fh.write(l+" ") + fh.write("\n") + + self.linklinecreate = ''' + line='' + for l in $@; do + loc=$(spack location -i $l) + libraries=$(ls $loc/lib) + if echo "$libraries" | grep -q "_d"; then + for i in $libraries; do + if [ "$i" != "cmake" ] && echo "$i" | grep -q "_d"; then + ln1=${i%.*} + ln2=${ln1#???} + line=$line" -L$loc/lib -l$ln2" + fi + done + else + for i in $libraries; do + if [ "$i" != "cmake" ]; then + ln1=${i%.*} + ln2=${ln1#???} + line=$line" -L$loc/lib -l$ln2" + fi + done + fi + done + ''' + + with open(self.filePath+"/linkline.sh","a") as fh: + fh.writelines(textwrap.dedent(self.linklinecreate)) + fh.write("MF_PATH='/apps/"+self.e+"/exec/Makefile'\n") + fh.write('sed -i "/MK_TEMPLATE = /a LL = $line" $MF_PATH\n') + fh.write("sed -i 's|\($^\) \($(LDFLAGS)\)|\\1 $(LL) \\2|' $MF_PATH\n") + +## BARE METAL; if addlibs defined on bare metal, include those additional libraries in link line + elif "tmp" not in self.filePath: + for l in self.l: # baremetal_linkerflags + linkline = linkline + " " + l + os.system(f"sed -i '/MK_TEMPLATE = /a LL = {linkline}' {self.filePath}/Makefile") + os.system(f"sed -i 's|\($(LDFLAGS)\)|$(LL) \\1|' {self.filePath}/Makefile") + +class makefile(): +## \brief Opens Makefile and sets the experiment and other common variables +## \param self The Makefile object +## \param exp Experiment name +## \param libs Additional libraries/linker flags defined by user +## \param srcDir The 
path to the source directory +## \param bldDir The path to the build directory +## \param mkTemplatePath The path of the template .mk file for compiling + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath): + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.bld) + self.filePath = self.bld # Needed so that the container and bare metal builds can + # use the same function to create the Makefile + +## \brief Adds a component and corresponding requires to the list +## \param self The Makefile object +## \param c The component +## \param r The requires for that componenet +## \param o The overrides for that component + def addComponent (self,c,r,o): + self.c.append(c) + self.r.append(r) + self.o.append(o) +## \brief Sorts the component by how many requires there are for that component +## \param self The Makefile object +## \param c The component +## \param r The requires for that component +## \param o The overrides for that component + def createLibstring (self,c,r,o): + d=zip(self.c,self.r,self.o) + return(sorted(d,key=lambda values:len(values[1]),reverse=True)) + +## \brief Writes the Makefile. Should be called after all components are added +## \param self The Makefile object + def writeMakefile (self): +# Get the list of all of the libraries + sd=self.createLibstring(self.c,self.r,self.o) + libstring=" " + for i in sd: + lib=i[0] + libstring = libstring+lib+"/lib"+lib+".a " +# Open the Makefile for Writing + with open(self.filePath+"/Makefile","w") as fh: +# Write the header information for the Makefile + fh.write("# Makefile for "+self.e+"\n") + fh.write("SRCROOT = "+self.src+"/\n") + fh.write("BUILDROOT = "+self.bld+"/\n") + fh.write("MK_TEMPLATE = "+self.template+"\n") + fh.write("include $(MK_TEMPLATE)"+"\n") +# Write the main experiment compile + fh.write(self.e+".x: "+libstring+"\n") + fh.write("\t$(LD) $^ $(LDFLAGS) -o $@ $(STATIC_LIBS)"+"\n") + +# Write the link line script with user-provided libraries + if self.l: + linklineBuild(self) + +# Write the individual component library compiles + with open(self.filePath+"/Makefile","a") as fh: + for (c,r,o) in sd: + libstring = " " + for lib in r: + libstring = libstring+lib+"/lib"+lib+".a " + cstring = c+"/lib"+c+".a: " + fh.write(cstring+libstring+" FORCE"+"\n") + if o == "": + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) --directory="+c+" $(@F)\n") + else: + fh.write("\t$(MAKE) SRCROOT=$(SRCROOT) BUILDROOT=$(BUILDROOT) MK_TEMPLATE=$(MK_TEMPLATE) "+o+" --directory="+c+" $(@F)\n") + fh.write("FORCE:\n") + fh.write("\n") +# Set up the clean + fh.write("clean:\n") + for c in self.c: + fh.write("\t$(MAKE) --directory="+c+" clean\n") +# Set up localize + fh.write("localize:\n") + for c in self.c: + fh.write("\t$(MAKE) -f $(BUILDROOT)"+c+" localize\n") +# Set up distclean + fh.write("distclean:\n") + for c in self.c: + fh.write("\t$(RM) -r "+c+"\n") + fh.write("\t$(RM) -r "+self.e+"\n") + fh.write("\t$(RM) -r Makefile \n") + +### This seems incomplete? ~ ejs +## The makefile class for a container. It gets built into a temporary directory so it can be copied +## into the container. 
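+## Only __init__ is overridden (plus the getTmpDir helper); addComponent, createLibstring and
+## writeMakefile are inherited from makefile, with filePath pointed at tmpDir so their output lands
+## in the local temporary directory that buildDocker later copies into the image. A hypothetical
+## usage sketch (argument values assumed from the other files in this diff, not taken verbatim from
+## any caller):
+##   mk = makefileContainer(exp="am5", libs=["netcdf-fortran"], srcDir="/apps/am5/src",
+##                          bldDir="/apps/am5/exec",
+##                          mkTemplatePath="/apps/mkmf/templates/hpcme-intel21.mk",
+##                          tmpDir="tmp/hpcme.2023")
+##   mk.addComponent("FMS", [], "")
+##   mk.writeMakefile()        # writes tmp/hpcme.2023/Makefile (and linkline.sh, since libs is set)
+##   mk.getTmpDir()            # "tmp/hpcme.2023"; buildDocker reads the files to copy from here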
+## \param exp Experiment name +## \param libs Additional libraries/linker flags defined by user +## \param srcDir The path to the source directory +## \param bldDir The path to the build directory +## \param mkTemplatePath The path of the template .mk file for compiling +## \param tmpDir A local path to temporarily store files build to be copied to the container +class makefileContainer(makefile): + def __init__(self,exp,libs,srcDir,bldDir,mkTemplatePath,tmpDir): + self.e = exp + self.l = libs + self.src = srcDir + self.bld = bldDir + self.template = mkTemplatePath + self.tmpDir = tmpDir + self.c =[] #components + self.r=[] #requires + self.o=[] #overrides + os.system("mkdir -p "+self.tmpDir) + self.filePath = self.tmpDir # Needed so that the container and bare metal builds can + # use the same function to create the Makefile + +## \return the tmpDir +## \param self The makefile object + def getTmpDir(self): + return self.tmpDir diff --git a/fre/make/gfdlfremake/platformfre.py b/fre/make/gfdlfremake/platformfre.py new file mode 100644 index 00000000..df37aa56 --- /dev/null +++ b/fre/make/gfdlfremake/platformfre.py @@ -0,0 +1,106 @@ +import yaml + +class platforms (): +## \param self The platform yaml object +## \param fname The path to the platform yaml file +## \param v the fre variables defined in the model Yaml + def __init__(self,fname,v): + with open(fname, 'r') as file: + self.yaml = yaml.safe_load(v.freVarSub(file.read())) +## Check the yaml for errors/omissions + try: + self.yaml["platforms"] + except: + print(fname+" must have a platforms key\n") + raise +## Loop through the platforms + for p in self.yaml["platforms"]: +## Check the platform name + try: + p["name"] + except: + print("At least one of the platforms is missing a name in "+fname+"\n") + raise +## Check the compiler + try: + p["compiler"] + except: + print ("You must specify a compiler in your "+p["name"]+" platform in the file "+fname+"\n") + raise +## Check for the Fortran (fc) and C (cc) compilers + try: + p["fc"] + except: + print ("You must specify the name of the Fortran compiler as fc on the "+p["name"]+" platform in the file "+fname+"\n") + raise + try: + p["cc"] + except: + print ("You must specify the name of the Fortran compiler as cc on the "+p["name"]+" platform in the file "+fname+"\n") + raise +## Check for modules to load + try: + p["modules"] + except: + p["modules"]=[""] +## Check for modulesInit to set up the modules environment + try: + p["modulesInit"] + except: + p["modulesInit"]=[""] +## Get the root for the build + try: + p["modelRoot"] + except: + p["modelRoot"] = "/apps" +## Check if we are working with a container and get the info for that + try: + p["container"] + except: + p["container"] = False + p["RUNenv"] = "" + p["containerBuild"] = "" + p["containerRun"] = "" + if p["container"]: +## Check the container builder + try: + p["containerBuild"] + except: + print ("You must specify the program used to build the container (containerBuild) on the "+p["name"]+" platform in the file "+fname+"\n") + raise + if p["containerBuild"] != "podman" and p["containerBuild"] != "docker": + raise ValueError("Container builds only supported with docker or podman, but you listed "+p["containerBuild"]+"\n") +## Check for container environment set up for RUN commands + try: + p["RUNenv"] + except: + p["RUNenv"] = "" +## Check the container runner + try: + p["containerRun"] + except: + print ("You must specify the program used to run the container (containerRun) on the "+p["name"]+" platform in the file 
"+fname+"\n") + raise + if p["containerRun"] != "apptainer" and p["containerRun"] != "singularity": + raise ValueError("Container builds only supported with apptainer, but you listed "+p["containerRun"]+"\n") +## set the location of the mkTemplate. In a container, it uses the hpc-me template cloned from mkmf + p["mkTemplate"] = "/apps/mkmf/templates/hpcme-intel21.mk" + else: + try: + p["mkTemplate"] + except: + raise ValueError("The non-container platform "+p["name"]+" must specify a mkTemplate \n") +## \brief Checks if the platform yaml has the named platform + def hasPlatform(self,name): + for p in self.yaml["platforms"]: + if p["name"] == name: + return True + return False +## \brief Get the platform yaml + def getPlatformsYaml(self): + return self.yaml +## \brief Get the platform information from the name of the platform + def getPlatformFromName(self,name): + for p in self.yaml["platforms"]: + if p["name"] == name: + return (p["compiler"], p["modules"], p["modulesInit"], p["fc"], p["cc"], p["modelRoot"],p["container"], p["mkTemplate"],p["containerBuild"], p["containerRun"], p["RUNenv"]) diff --git a/fre/make/gfdlfremake/schema.json b/fre/make/gfdlfremake/schema.json new file mode 100644 index 00000000..7e460d6f --- /dev/null +++ b/fre/make/gfdlfremake/schema.json @@ -0,0 +1,164 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://example.com/product.schema.json", + "title": "frecanopy", + "description": "A yaml base for use with fre canopy", + "type": "object", + "properties": { + "platformYaml": { + "description": "Path to the platform yaml file", + "type": "string" + }, + "compileYaml": { + "description": "Path to the compile yaml file", + "type": "string" + }, + "experiment": { + "description": "The name of the model", + "type": "string" + }, + "container_addlibs":{ + "description": "Libraries and packages needed for linking in the container", + "type": ["array","string","null"] + }, + "baremetal_linkerflags":{ + "description": "Linker flags of libraries and packages needed for linking in the bare-metal build", + "type": ["array","string","null"] + }, + "src":{ + "description": "The source code descriptions", + "type": "array", + "items":{"$ref": "#/$defs/comp" } + }, + "platforms":{ + "description": "FRE platforms", + "type": "array", + "items":{"$ref": "#/$defs/plat" } + } + }, + "$defs":{ + "comp": { + "type": "object", + "required": ["component", "repo"], + "properties":{ + "component": { + "type": "string", + "description": "The name of the model component" + }, + "repo": { + "anyOf": [ + { + "type": "string", + "description": "The URL of the code repository" + }, + { + "type": ["array","string"], + "description": "The URL of the code repository" + } + ] + }, + "branch": { + "anyOf": [ + { + "type": "string", + "description": "The version of code to clone" + }, + { + "type": ["array","string"], + "description": "The version of code to clone" + } + ] + }, + "requires": { + "type": ["array","string"], + "description": "list of componets that this component depends on" + }, + "cppdefs": { + "type": "string", + "description": "String of CPPDEFs to include in compiling the component" + }, + "paths": { + "type": ["array","string"], + "description": "A list of the paths in the component to compile" + }, + "additionalInstructions": { + "type": "string", + "description": "Additional instuctions to run after checkout" + }, + "doF90Cpp": { + "type": "boolean", + "description": "True if the preprocessor needs to be run" + }, + "makeOverrides": { + "type": 
"string", + "description": "Overrides openmp target for MOM6" + }, + "otherFlags": { + "type": "string", + "description": "String of Include flags necessary to retrieve other code needed" + } + }, + "additionalProperties": false + }, + "plat": { + "type": "object", + "required": ["name", "compiler", "fc", "cc"], + "properties":{ + "name": { + "type": "string", + "description": "The name of the platform" + }, + "compiler": { + "type": "string", + "description": "The compiler used to build the model" + }, + "modulesInit": { + "type": ["array","string"], + "description": "Array of commands to run before loading modules" + }, + "modules": { + "type": ["array","string"], + "description": "List (array) of modules to load" + }, + "fc": { + "type": "string", + "description": "The Fortran compiler" + }, + "cc": { + "type": "string", + "description": "The C compiler" + }, + "mkTemplate": { + "type": "string", + "description": "Path to the mk template file" + }, + "modelRoot": { + "type": "string", + "description": "Path to the root for all model install files" + }, + "container": { + "anyOf": + [ + {"type": "string"}, + {"type": "boolean"} + ] + }, + "RUNenv": { + "type": ["array", "string"], + "description": "Commands needed at the beginning of a RUN in dockerfile" + }, + "containerBuild": { + "type": "string", + "description": "Program used to build the container" + }, + "containerRun": { + "type": "string", + "description": "Program used to run the container" + } + }, + "additionalProperties": false + } + }, + "required": ["experiment", "src", "platforms"], + "additionalProperties": true +} diff --git a/fre/make/gfdlfremake/targetfre.py b/fre/make/gfdlfremake/targetfre.py new file mode 100644 index 00000000..b34e381f --- /dev/null +++ b/fre/make/gfdlfremake/targetfre.py @@ -0,0 +1,65 @@ +## Stores information about the target +class fretarget: +## Sets up information about the target and handles errors +## \note The default target is prod +## \param self the fretarget object +## \param t The target string + def __init__(self,t): + self.target = t # The target string +## Split the target string + targ = self.target.split('-') + self.makeline_add = "" + self.debug = False + self.repro = False + self.prod = False +## Parse the target string for prod, repro, and debug. 
Set up what to add to the +## make line during compile when using mkmf builds + for target in targ: + if target == "debug": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.debug = True + elif target == "prod": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.prod = True + elif target == "repro": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.repro = True +## Check to see if openmp is included in the target and add that to the makeline add string + if target == "openmp": + targ = target.upper() + self.makeline_add = self.makeline_add + targ + "=on " + self.openmp = True + else: + self.openmp = False +## Check to make sure only one of the prod, debug, repro are used + errormsg = "You can only list one mutually exclusive target, but your target '"+self.target+"' lists more than one of the following targets: \n debug \n prod \n repro" + if self.debug: + try: + if self.repro or self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + elif self.repro: + try: + if self.prod == True: + raise ValueError(errormsg) + except ValueError: + raise + else: + try: + if self.prod == False: + raise ValueError("Your target '"+self.target+"' needs to include one of the following: prod, repro, debug") + except ValueError: + raise +## Returns the name of the target +## \param self The fretarget object + def gettargetName(self): + return self.target +## Returns the makeline_add +## \param self The fretarget object + def getmakeline_add(self): + return self.makeline_add + diff --git a/fre/make/gfdlfremake/varsfre.py b/fre/make/gfdlfremake/varsfre.py new file mode 100644 index 00000000..c153e845 --- /dev/null +++ b/fre/make/gfdlfremake/varsfre.py @@ -0,0 +1,56 @@ +import yaml +import os +import re + +## Removes enclosing braces or parentheses and optionally leading dollar sign from a string +## \param inString A string from which characters will be removed +## \param bound A set of characters to be removed from the beginning and end of the string +## \param leadingDollar (optional) If True, removed leading dollar sign +def removeEnclosing(inString, bound='()', leadingDollar=True): + if leadingDollar: + inString = inString.lstrip('$') + return inString.lstrip(bound[0]).rstrip(bound[1]) + +## Retrieves an environmental variable based on a string of the form $(VAR) +## \param inString A string that specifies the variable to be retrieved +def getEnvSub(inString): + return os.getenv(removeEnclosing(inString.group(), bound='{}')) + +## Replaces all instances of Linux environment variables (${VAR}) in a string using getEnvSub +## \param string A string to have the variables replaced +def envReplace(string): + return re.sub("\$\{\w+\}", getEnvSub, string) + +## Reads stores and replaces the fre variables set in the model YAML +class frevars(): +## Grabs the FRE variables from the model yaml +## \param self The frevars object +## \param y The model yaml file name + def __init__(self,y): + with open(y, 'r') as file: + self.modelyaml = yaml.safe_load(file) +## Substitutes fre variables with the format $(variable) into the input string +## \string The string that contains the variables +## \returns String with the fre variables filled in + def freVarReplace(self,string): + ## Retrieves a value from the modelyaml based on a key of the form $(VAR) + ## \param inString A string that specifies the value to be retrieved + def getVarYamlSub(inString): + return 
self.modelyaml[removeEnclosing(inString.group())] + return re.sub("\$\(\w+\)", getVarYamlSub, string) + +## Wrapper that relaces environment ${} and FRE $() variables +## \param self the FRE yaml varaibles (FRE properties) +## \param string The YAML string that is having its variables replaced +## \returns string with the environment and FRE variables replaced + def freVarSub(self, string): + tmpString = envReplace(string) + returnString = self.freVarReplace(tmpString) + return returnString + +## Wrapper that takes in a string (yaml) and fills in the FRE and Environment variables +## \param y Path to yaml file whose variables need to be filled in + def fillInYamlWithVars(self,y): + with open(y, 'r') as file: + yamlString=read(file) + return self.freVarSub(yamlString) diff --git a/fre/make/gfdlfremake/yamlfre.py b/fre/make/gfdlfremake/yamlfre.py new file mode 100644 index 00000000..79a89be5 --- /dev/null +++ b/fre/make/gfdlfremake/yamlfre.py @@ -0,0 +1,164 @@ +import yaml +import json +from jsonschema import validate, ValidationError, SchemaError +from . import platformfre + +## Open the yaml file and parse as fremakeYaml +## \param fname the name of the yaml file to parse +## \param v the FRE yaml varaibles (FRE properties) +def parseCompile(fname,v): +## Open the yaml file and parse as fremakeYaml + with open(fname, 'r') as file: + # Substitute the variables and read the updated yaml string + y = yaml.safe_load(v.freVarSub(file.read())) + return y +## \brief Checks the yaml for variables. Required variables will dump and error. Non-required variables will +## set a default value for the variable +## \param var A variable in the yaml +## \param val a default value for var +## \param req if true, the variable is required in the yaml and an exception will be raised +## \param err An error message to print if the variable is required and doesn't exist +def yamlVarCheck(var,val="",req=False,err="error"): + try: + var + except: + if req: + print (err) + raise + else: + var = val +## This will read the compile yaml for FRE and then fill in any of the missing non-required variables +class compileYaml(): +## Read get the compile yaml and fill in the missing pieces +## \param self the compile Yaml object +## \yamlFile The path to the compile yaml file +## \v The FRE variables set in the model yaml + def __init__(self,yamlFile,v): + self.file = yamlFile + self.yaml = parseCompile(self.file,v) + ## Check the yaml for required things + ## Check for required experiment name + try: + self.yaml["experiment"] + except: + print("You must set an experiment name to compile \n") + raise + ## Check for optional libraries and packages for linking in container + try: + self.yaml["container_addlibs"] + except: + self.yaml["container_addlibs"]="" + ## Check for optional libraries and packages for linking on bare-metal system + try: + self.yaml["baremetal_linkerflags"] + except: + self.yaml["baremetal_linkerflags"]="" +# ## Set up the srcDir +# self.src = modelRoot + "/" + self.yaml["experiment"] + "/src" + ## Check for required src + try: + self.yaml["src"] + except: + print("You must set a src to specify the sources in "+self.yaml["experiment"]+"\n") + raise + ## Loop through the src array + for c in self.yaml['src']: + ## Check for required componenet name + try: + c['component'] + except: + print("You must set the 'componet' name for each src component") + raise + ## Check for required repo url + try: + c['repo'] + except: + print("'repo' is missing from the component "+c['component']+" in 
"+self.yaml["experiment"]+"\n") + raise + # Check for optional branch. Otherwise set it to blank + try: + c['branch'] + except: + c['branch']="" + # Check for optional cppdefs. Otherwise set it to blank + try: + c['cppdefs'] + except: + c['cppdefs']="" + # Check for optional doF90Cpp. Otherwise set it to False + try: + c['doF90Cpp'] + except: + c['doF90Cpp']=False + # Check for optional additional instructions. Otherwise set it to blank + try: + c['additionalInstructions'] + except: + c['additionalInstructions']="" + # Check for optional paths. Otherwise set it to blank + try: + c['paths'] + except: + c['paths']=[c['component']] + # Check for optional requires. Otherwise set it to blank + try: + c['requires'] + except: + c['requires']=[] + # Check for optional overrides. Otherwise set it to blank + try: + c['makeOverrides'] + except: + c['makeOverrides']="" + # Check for optional flags. Otherwise set it to blank. + try: + c["otherFlags"] + except: + c["otherFlags"]="" + +## Returns the compile yaml + def getCompileYaml(self): + try: + self.yaml + except: + print ("You must initialize the compile YAML object before you try to get the yaml \n") + raise + return self.yaml + +######################################################################################################################### +## \description This will take the models yaml file which has a list of the sub yaml files and combine them into the +## full freyaml that can be used and checked +# platformYaml: platforms.yaml +# layoutYaml: +# compileYaml: compile.yaml +# experiments: + +class freyaml(): +## \param self The freyaml object +## \param modelFileName The name of the model yaml file +## \param v the FRE yaml varaibles object (FRE properties) + def __init__(self,modelFileName,v): + self.freyaml = {} + self.modelfile = modelFileName + with open(self.modelfile, 'r') as file: + self.modelyaml = yaml.safe_load(v.freVarSub(file.read())) + self.freyaml.update(self.modelyaml) + self.compilefile = self.modelyaml["compileYaml"] + self.compile = compileYaml(self.compilefile,v) + self.compileyaml = self.compile.getCompileYaml() + self.freyaml.update(self.compileyaml) + self.platformsfile = self.modelyaml["platformYaml"] + self.platforms = platformfre.platforms(self.platformsfile,v) + self.platformsyaml = self.platforms.getPlatformsYaml() + self.freyaml.update(self.platformsyaml) +## Validate the YAML + with open("schema.json", 'r') as f: + s = f.read() + schema = json.loads(s) + validate(instance=self.freyaml, schema=schema) +## \return platform yaml dictionary + def getPlatformsYaml(self): + return self.platformsyaml +## \return compile yaml dictionary + def getCompileYaml(self): + return self.compileyaml diff --git a/fre/make/gfdlfremake/yamls/am5.yaml b/fre/make/gfdlfremake/yamls/am5.yaml new file mode 100644 index 00000000..4b7bf8d4 --- /dev/null +++ b/fre/make/gfdlfremake/yamls/am5.yaml @@ -0,0 +1,6 @@ +platformYaml: platforms.yaml +compileYaml: compile.yaml +release: f1a1r1 +INTEL: "intel-classic" +FMSincludes: "-IFMS/fms2_io/include -IFMS/include -IFMS/mpp/include" +momIncludes: "-Imom6/MOM6-examples/src/MOM6/pkg/CVMix-src/include" diff --git a/fre/make/gfdlfremake/yamls/compile.yaml b/fre/make/gfdlfremake/yamls/compile.yaml new file mode 100644 index 00000000..5200599c --- /dev/null +++ b/fre/make/gfdlfremake/yamls/compile.yaml @@ -0,0 +1,66 @@ +experiment: "am5" +container_addlibs: +baremetal_linkerflags: +src: + - component: "FMS" + repo: "https://github.com/NOAA-GFDL/FMS.git" + cppdefs: "-DINTERNAL_FILE_NML -Duse_libMPI 
-Duse_netCDF" + branch: "2022.01" + cppdefs: "-DHAVE_GETTID -Duse_libMPI -Duse_netCDF" + otherFlags: "$(FMSincludes)" + - component: "am5_phys" + requires: ["FMS"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/am5_phys.git" + branch: "2022.01" + otherFlags: "$(FMSincludes)" + - component: "GFDL_atmos_cubed_sphere" + requires: ["FMS", "am5_phys"] + repo: "https://github.com/NOAA-GFDL/GFDL_atmos_cubed_sphere.git" + cppdefs: "-DSPMD -DCLIMATE_NUDGE -DINTERNAL_FILE_NML" + branch: "2022.01" + paths: ["GFDL_atmos_cubed_sphere/driver/GFDL", + "GFDL_atmos_cubed_sphere/model", + "GFDL_atmos_cubed_sphere/driver/SHiELD/cloud_diagnosis.F90", + "GFDL_atmos_cubed_sphere/driver/SHiELD/gfdl_cloud_microphys.F90", + "GFDL_atmos_cubed_sphere/tools", + "GFDL_atmos_cubed_sphere/GFDL_tools"] + otherFlags: "$(FMSincludes)" + - component: "atmos_drivers" + requires: ["FMS", "am5_phys", "GFDL_atmos_cubed_sphere"] + repo: "https://github.com/NOAA-GFDL/atmos_drivers.git" + cppdefs: "-DSPMD -DCLIMATE_NUDGE" + branch: "2022.01" + paths: ["atmos_drivers/coupled"] + otherFlags: "$(FMSincludes)" + - component: "ice_sis" + requires: ["FMS", "ice_param", "mom6"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/ice_sis.git" + branch: "2021.02" + otherFlags: "$(FMSincludes) $(momIncludes)" + - component: "ice_param" + repo: "https://github.com/NOAA-GFDL/ice_param.git" + cppdefs: "-Duse_yaml -Duse_libMPI -Duse_netCDF" + branch: "2021.02" + requires: ["FMS", "mom6"] + otherFlags: "$(FMSincludes) $(momIncludes)" + - component: "land_lad2" + requires: ["FMS"] + repo: "https://gitlab.gfdl.noaa.gov/FMS/land_lad2.git" + branch: "2022.01" + branch: "land_lad2_2021.02" + doF90Cpp: True + cppdefs: "-DINTERNAL_FILE_NML" + otherFlags: "$(FMSincludes)" + - component: "mom6" + requires: ["FMS"] + paths: ["mom6/MOM6-examples/src/MOM6/config_src/dynamic", "mom6/MOM6-examples/src/MOM6/config_src/coupled_driver", "mom6/MOM6-examples/src/MOM6/src/*/", "mom6/MOM6-examples/src/MOM6/src/*/*/", "mom6/ocean_BGC/generic_tracers", "mom6/ocean_BGC/mocsy/src"] + branch: ["2021.02","dev/gfdl/2018.04.06"] + repo: ["https://github.com/NOAA-GFDL/ocean_BGC.git","https://github.com/NOAA-GFDL/MOM6-examples.git"] + makeOverrides: 'OPENMP=""' + otherFlags: "$(FMSincludes) $(momIncludes)" + - component: "FMScoupler" + paths: ["FMScoupler/full", "FMScoupler/shared"] + repo: "https://github.com/NOAA-GFDL/FMScoupler.git" + branch: "2022.01" + requires: ["FMS", "atmos_drivers", "am5_phys", "land_lad2", "ice_sis", "ice_param", "mom6"] + otherFlags: "$(FMSincludes) $(momIncludes)" diff --git a/fre/make/gfdlfremake/yamls/platforms.yaml b/fre/make/gfdlfremake/yamls/platforms.yaml new file mode 100644 index 00000000..02b7d222 --- /dev/null +++ b/fre/make/gfdlfremake/yamls/platforms.yaml @@ -0,0 +1,26 @@ +platforms: + - name: ncrc5.intel + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: ["$(INTEL)/2022.2.1","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + modelRoot: ${HOME}/fremake_canopy/test + - name: ncrc5.intel23 + compiler: intel + modulesInit: [" module use -a /ncrc/home2/fms/local/modulefiles \n","source $MODULESHOME/init/sh \n"] + modules: ["$(INTEL)/2023.1.0","fre/bronx-20",cray-hdf5/1.12.2.3, cray-netcdf/4.9.0.3] + fc: ftn + cc: cc + mkTemplate: "/ncrc/home2/fms/local/opt/fre-commands/bronx-20/site/ncrc5/$(INTEL).mk" + modelRoot: ${HOME}/fremake_canopy/test + - name: hpcme.2023 
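+    # Container platform: with container set to True below, containerBuild and containerRun are
+    # required and mkTemplate may be omitted; platformfre.py substitutes the hpc-me template that
+    # ships inside the image (/apps/mkmf/templates/hpcme-intel21.mk). RUNenv lists the spack
+    # setup/load commands prepended to the generated Dockerfile's RUN steps.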
+ compiler: intel + RUNenv: [". /spack/share/spack/setup-env.sh", "spack load libyaml", "spack load netcdf-fortran@4.5.4", "spack load hdf5@1.14.0"] + modelRoot: /apps + fc: mpiifort + cc: mpiicc + container: True + containerBuild: "podman" + containerRun: "apptainer" diff --git a/fre/make/gfdlfremake/yamls/schema.json b/fre/make/gfdlfremake/yamls/schema.json new file mode 120000 index 00000000..c92d7461 --- /dev/null +++ b/fre/make/gfdlfremake/yamls/schema.json @@ -0,0 +1 @@ +../schema.json \ No newline at end of file diff --git a/fre/make/runFremake.py b/fre/make/runFremake.py new file mode 100644 index 00000000..0550cc5f --- /dev/null +++ b/fre/make/runFremake.py @@ -0,0 +1,140 @@ +#!/usr/bin/python3 +## \date 2023 +## \author Tom Robinson +## \author Dana Singh +## \author Bennett Chang +## \description Script for fremake is used to create and run a code checkout script and compile a model. + +import click +import subprocess +import os +import yaml +import logging +from .gfdlfremake import targetfre, varsfre, yamlfre, checkout, makefilefre, buildDocker, buildBaremetal +from multiprocessing.dummy import Pool + + +@click.command() +def fremake_run(yamlfile, platform, target, execute, parallel, jobs, no_parallel_checkout, submit, verbose): + + yml = yamlfile + ps = platform + ts = target + nparallel = parallel + jobs = str(jobs) + pcheck = no_parallel_checkout + + if pcheck: + pc = "" + else: + pc = " &" + + if verbose: + logging.basicConfig(level=logging.INFO) + else: + logging.basicConfig(level=logging.ERROR) + +#### Main + srcDir="src" + checkoutScriptName = "checkout.sh" + baremetalRun = False # This is needed if there are no bare metal runs + ## Split and store the platforms and targets in a list + plist = platform + tlist = target + ## Get the variables in the model yaml + freVars = varsfre.frevars(yml) + ## Open the yaml file and parse as fremakeYaml + modelYaml = yamlfre.freyaml(yml,freVars) + fremakeYaml = modelYaml.getCompileYaml() + ## Error checking the targets + for targetName in tlist: + target = targetfre.fretarget(targetName) + ## Loop through the platforms specified on the command line + ## If the platform is a baremetal platform, write the checkout script and run it once + ## This should be done separately and serially because bare metal platforms should all be using + ## the same source code. + for platformName in plist: + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Create the source directory for the platform + if iscontainer == False: + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + if not os.path.exists(srcDir): + os.system("mkdir -p " + srcDir) + if not os.path.exists(srcDir+"/checkout.sh"): + freCheckout = checkout.checkout("checkout.sh",srcDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + + ## TODO: Options for running on login cluster? 
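+                # The checkout script is created and run once per bare-metal source directory,
+                # serially on the current host, before the platform/target compile loop below:
+                # all bare-metal platforms are expected to build from this same checked-out source tree.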
+ freCheckout.run() + + fremakeBuildList = [] + ## Loop through platforms and targets + for platformName in plist: + for targetName in tlist: + target = targetfre.fretarget(targetName) + if modelYaml.platforms.hasPlatform(platformName): + pass + else: + raise SystemExit (platformName + " does not exist in " + modelYaml.platformsfile) + (compiler,modules,modulesInit,fc,cc,modelRoot,iscontainer,mkTemplate,containerBuild,ContainerRun,RUNenv)=modelYaml.platforms.getPlatformFromName(platformName) + ## Make the bldDir based on the modelRoot, the platform, and the target + srcDir = modelRoot + "/" + fremakeYaml["experiment"] + "/src" + ## Check for type of build + if iscontainer == False: + baremetalRun = True + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/" + platformName + "-" + target.gettargetName() + "/exec" + os.system("mkdir -p " + bldDir) + ## Create the Makefile + freMakefile = makefilefre.makefile(fremakeYaml["experiment"],srcDir,bldDir,mkTemplate) + # Loop through components and send the component name, requires, and overrides for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + ## Create a list of compile scripts to run in parallel + fremakeBuild = buildBaremetal.buildBaremetal(fremakeYaml["experiment"],mkTemplate,srcDir,bldDir,target,modules,modulesInit,jobs) + for c in fremakeYaml['src']: + fremakeBuild.writeBuildComponents(c) + fremakeBuild.writeScript() + fremakeBuildList.append(fremakeBuild) + # ## Run the build + # fremakeBuild.run() + else: + #################################### container stuff below ########################################################### + ## Run the checkout script + # image="hpc-me-intel:2021.1.1" + image="ecpe4s/noaa-intel-prototype:2023.09.25" + bldDir = modelRoot + "/" + fremakeYaml["experiment"] + "/exec" + tmpDir = "tmp/"+platformName + freCheckout = checkout.checkoutForContainer("checkout.sh", srcDir, tmpDir) + freCheckout.writeCheckout(modelYaml.compile.getCompileYaml(),jobs,pc) + freCheckout.finish(pc) + ## Create the makefile + ### Should this even be a separate class from "makefile" in makefilefre? 
~ ejs + freMakefile = makefilefre.makefileContainer(fremakeYaml["experiment"],srcDir,bldDir,mkTemplate,tmpDir) + # Loop through compenents and send the component name and requires for the Makefile + for c in fremakeYaml['src']: + freMakefile.addComponent(c['component'],c['requires'],c['makeOverrides']) + freMakefile.writeMakefile() + ##### NEED MAKEFILE + dockerBuild = buildDocker.container(image,fremakeYaml["experiment"],RUNenv,target) + dockerBuild.writeDockerfileCheckout("checkout.sh", tmpDir+"/checkout.sh") + dockerBuild.writeDockerfileMakefile(freMakefile.getTmpDir() + "/Makefile") + for c in fremakeYaml['src']: + dockerBuild.writeDockerfileMkmf(c) + dockerBuild.build() + # freCheckout.cleanup() + #buildDockerfile(fremakeYaml,image) + os.system("podman build -f Dockerfile -t "+fremakeYaml["experiment"]+":latest") + if baremetalRun: + if __name__ == '__main__': + pool = Pool(processes=nparallel) # Create a multiprocessing Pool + pool.map(buildBaremetal.fremake_parallel,fremakeBuildList) # process data_inputs iterable with pool + + +if __name__ == "__main__": + fremake_run() diff --git a/meta.yaml b/meta.yaml index f831c92b..a4a16470 100644 --- a/meta.yaml +++ b/meta.yaml @@ -37,6 +37,7 @@ test: imports: - click - fre + - fre.make - fre.frepp - fre.frepp.install - fre.frepp.status @@ -51,6 +52,12 @@ test: - fre pp status --help - fre pp run --help - fre pp validate --help + - fre make --help + - fre make create-checkout --help + - fre make create-compile --help + - fre make create-dockerfile --help + - fre make create-makefile --help + - fre make run-fremake --help - fre app --help - fre app mask-atmos-plevel --help - fre cmor --help diff --git a/setup.py b/setup.py index d0e81cff..496e1515 100644 --- a/setup.py +++ b/setup.py @@ -12,8 +12,7 @@ 'click', 'pyyaml', 'jsonschema', - 'metomi-rose', - 'gfdlfremake' + 'metomi-rose' ], entry_points={ 'console_scripts': [