Skip to content

Commit

Permalink
Added option to run multiple analytics workflows during history match…
Browse files Browse the repository at this point in the history
…ing (#337)

* Added option to run multiple analytics workflows during history matching

* Added changelog item

* Made sure that if no analysis is requested there are no errors

* Added blank line

* Test new analysis keyword in CI/CD

* Fix hyperopt issues with new analysis keyword

* Use first analytics item defined in hyperopt

* black

* Check if attr exists

* Add comment that hyperopt looks at first analysis workflow

* Put back equinor:master
  • Loading branch information
wouterjdb authored Feb 26, 2021
1 parent 3e8becb commit f0d5db3
Show file tree
Hide file tree
Showing 7 changed files with 96 additions and 54 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ This project adheres to [Semantic Versioning](https://semver.org/).
- [#272](https://github.com/equinor/flownet/pull/272) Adds resampling of observation dates at requested frequency by finding nearest date among existing observation dates (i.e., no interpolated dates added)

### Changes
- [#337](https://github.com/equinor/flownet/pull/337) You can now add multiple analytics workflows in your FlowNet config. This is also a breaking change in that it requires you to change the ert.analysis keyword to be a list.
- [#322](https://github.com/equinor/flownet/pull/322) RSVD input through csv files can now be done either as one table used for all EQLNUM regions, or as one table for each EQLNUM region. The csv file needs a header with column names "depth", "rs" and "eqlnum" (the latter only when multiple tables are defined).

## [0.4.0] - 2020-11-18
Expand Down
79 changes: 54 additions & 25 deletions src/flownet/config_parser/_config_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -453,6 +453,8 @@ def _to_abs_path(path: Optional[str]) -> str:
},
"loss": {
MK.Type: types.NamedDict,
MK.Description: "Definition of the hyperopt loss function. The definitions "
"refer to the first analysis workflow ONLY.",
MK.Content: {
"keys": {
MK.Type: types.List,
Expand Down Expand Up @@ -553,31 +555,54 @@ def _to_abs_path(path: Optional[str]) -> str:
"and webviz",
},
"analysis": {
MK.Type: types.NamedDict,
MK.Type: types.List,
MK.Description: "List of analysis workflows to run.",
MK.Content: {
"metric": {
MK.Type: types.List,
MK.Content: {
MK.Item: {MK.Type: types.String, MK.AllowNone: True}
},
MK.Transformation: _to_upper,
MK.Description: "List of accuracy metrics to be computed "
"in FlowNet analysis workflow",
},
"quantity": {
MK.Type: types.List,
MK.Item: {
MK.Type: types.NamedDict,
MK.Description: "Definitions of the analysis workflow.",
MK.Content: {
MK.Item: {MK.Type: types.String, MK.AllowNone: True}
"metric": {
MK.Type: types.List,
MK.Content: {
MK.Item: {
MK.Type: types.String,
MK.AllowNone: True,
}
},
MK.Transformation: _to_upper,
MK.Description: "List of accuracy metrics to be computed "
"in FlowNet analysis workflow",
},
"quantity": {
MK.Type: types.List,
MK.Content: {
MK.Item: {
MK.Type: types.String,
MK.AllowNone: True,
}
},
MK.Transformation: _to_upper,
MK.Description: "List of summary vectors for which accuracy "
"is to be computed",
},
"start": {
MK.Type: types.Date,
MK.AllowNone: True,
MK.Description: "Start date in YYYY-MM-DD format.",
},
"end": {
MK.Type: types.Date,
MK.AllowNone: True,
MK.Description: "End date in YYYY-MM-DD format.",
},
"outfile": {
MK.Type: types.String,
MK.AllowNone: True,
MK.Description: "The filename of the output of the workflow. "
"In case multiple analysis workflows are run this name should be unique.",
},
},
MK.Transformation: _to_upper,
MK.Description: "List of summary vectors for which accuracy "
"is to be computed",
},
"start": {MK.Type: types.Date, MK.AllowNone: True},
"end": {MK.Type: types.Date, MK.AllowNone: True},
"outfile": {
MK.Type: types.String,
MK.AllowNone: True,
},
},
},
Expand Down Expand Up @@ -1850,14 +1875,18 @@ def parse_config(
)

for key in config.flownet.hyperopt.loss.keys:
if not key in config.ert.analysis.quantity:
if (
not len(config.ert.analysis) > 0
or not key in config.ert.analysis[0].quantity
):
raise ValueError(
f"Key {key} is not defined as an analysis quantity ({config.flownet.hyperopt.loss.keys})."
)

if (
config.ert.analysis.metric
and config.flownet.hyperopt.loss.metric not in config.ert.analysis.metric
hasattr(config.ert, "analysis")
and len(config.ert.analysis) > 0
and config.flownet.hyperopt.loss.metric not in config.ert.analysis[0].metric
):
raise ValueError(
f"Key {config.flownet.hyperopt.loss.metric} is not defined as an analysis"
Expand Down
43 changes: 29 additions & 14 deletions src/flownet/ert/_create_ert_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,13 +206,6 @@ def create_ert_setup( # pylint: disable=too-many-arguments
with open(output_folder / "parameters.pickled", "wb") as fh:
pickle.dump(parameters, fh)

if hasattr(config.ert, "analysis"):
analysis_metric = "[" + ",".join(list(config.ert.analysis.metric)) + "]"
analysis_quantity = "[" + ",".join(list(config.ert.analysis.quantity)) + "]"
else:
analysis_metric = str(None)
analysis_quantity = str(None)

configuration = {
"pickled_network": output_folder.resolve() / "network.pickled",
"pickled_schedule": output_folder.resolve() / "schedule.pickled",
Expand All @@ -221,8 +214,6 @@ def create_ert_setup( # pylint: disable=too-many-arguments
"random_seed": None,
"debug": args.debug if hasattr(args, "debug") else False,
"pred_schedule_file": getattr(config.ert, "pred_schedule_file", None),
"analysis_metric": analysis_metric,
"analysis_quantity": analysis_quantity,
}

if not prediction_setup:
Expand Down Expand Up @@ -263,16 +254,39 @@ def create_ert_setup( # pylint: disable=too-many-arguments
else config.ert.static_include_files
)

shutil.copyfile(
_MODULE_FOLDER / ".." / "static" / "SAVE_ITERATION_ANALYTICS_WORKFLOW",
output_folder / "SAVE_ITERATION_ANALYTICS_WORKFLOW",
)

shutil.copyfile(
_MODULE_FOLDER / ".." / "static" / "SAVE_ITERATION_ANALYTICS_WORKFLOW_JOB",
output_folder / "SAVE_ITERATION_ANALYTICS_WORKFLOW_JOB",
)

analytics_workflow_template = _TEMPLATE_ENVIRONMENT.get_template(
"SAVE_ITERATION_ANALYTICS_WORKFLOW.jinja2"
)
if hasattr(config.ert, "analysis"):
for i, analysis_item in enumerate(config.ert.analysis):
with open(
output_folder / f"SAVE_ITERATION_ANALYTICS_WORKFLOW_{i}", "w"
) as fh:
fh.write(
analytics_workflow_template.render(
{
"reference_simulation": path_ref_sim,
"perforation_strategy": config.flownet.perforation_handling_strategy,
"run_path": config.ert.runpath,
"ecl_base": config.ert.eclbase,
"analysis_start": analysis_item.start,
"analysis_end": analysis_item.end,
"analysis_quantity": "["
+ ",".join(list(analysis_item.quantity))
+ "]",
"analysis_metric": "["
+ ",".join(list(analysis_item.metric))
+ "]",
"analysis_outfile": analysis_item.outfile,
}
)
)

shutil.copyfile(args.config, output_folder / args.config.name)
with open(os.path.join(output_folder, "pipfreeze.output"), "w") as fh:
subprocess.call(["pip", "freeze"], stdout=fh)
Expand All @@ -285,6 +299,7 @@ def create_ert_setup( # pylint: disable=too-many-arguments
else:
# Otherwise create an empty one.
(output_folder / f"{section}.inc").touch()

if not prediction_setup:
if parameters is not None:
create_observation_file(
Expand Down
4 changes: 3 additions & 1 deletion src/flownet/hyperparameter/_run_hyper.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,9 @@ def flownet_ahm_run(x: list, args: argparse.Namespace):
run_flownet_history_matching(config, run_args)

df_analytics = pd.read_csv(
(run_args.output_folder / config.ert.analysis.outfile).with_suffix(".csv")
(run_args.output_folder / config.ert.analysis[0].outfile).with_suffix(
".csv"
)
).drop_duplicates()

hyperopt_loss = 0.0
Expand Down
1 change: 0 additions & 1 deletion src/flownet/static/SAVE_ITERATION_ANALYTICS_WORKFLOW

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SAVE_ITERATION_ANALYTICS_WORKFLOW_JOB {{ reference_simulation }} {{ perforation_strategy }} {{ run_path }} {{ ecl_base }} {{ analysis_start }} {{ analysis_end }} {{ analysis_quantity }} {{ analysis_metric }} {{ analysis_outfile }}
21 changes: 8 additions & 13 deletions src/flownet/templates/ahm_config.ert.jinja2
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,11 @@
INSTALL_JOB DELETE_IN_CURRENT_ITERATION ./DELETE_IN_CURRENT_ITERATION

REFCASE <CONFIG_PATH>/SYNTHETIC_REFCASE
{%- if config.ert.analysis.metric: %}
DEFINE <ANALYSIS_METRIC> {{ analysis_metric }}
DEFINE <ANALYSIS_QUANTITY> {{ analysis_quantity }}
DEFINE <ANALYSIS_START> {{ config.ert.analysis.start }}
DEFINE <ANALYSIS_END> {{ config.ert.analysis.end }}
DEFINE <ANALYSIS_OUTFILE> {{ config.ert.analysis.outfile }}
DEFINE <PERFORATION_STRATEGY> {{ config.flownet.perforation_handling_strategy }}
DEFINE <REFERENCE_SIMULATION> {{ reference_simulation }}
{%- endif %}

DEFINE <RUN_PATH> {{ config.ert.runpath }}
DEFINE <ECL_BASE> {{ config.ert.eclbase }}
DEFINE <RANDOM_SAMPLES> ./parameters.json


INSTALL_JOB CREATE_FLOWNET_MODEL ./CREATE_FLOWNET_MODEL
{%- if not debug: %}
INSTALL_JOB DELETE_IN_CURRENT_ITERATION ./DELETE_IN_CURRENT_ITERATION
Expand All @@ -25,10 +16,14 @@ INSTALL_JOB DELETE_IN_CURRENT_ITERATION ./DELETE_IN_CURRENT_ITERATION
LOAD_WORKFLOW_JOB ./SAVE_ITERATION_PARAMETERS_WORKFLOW_JOB
LOAD_WORKFLOW SAVE_ITERATION_PARAMETERS_WORKFLOW
HOOK_WORKFLOW SAVE_ITERATION_PARAMETERS_WORKFLOW PRE_SIMULATION
{%- if config.ert.analysis.metric: %}

{%- if config.ert.analysis: %}
LOAD_WORKFLOW_JOB ./SAVE_ITERATION_ANALYTICS_WORKFLOW_JOB
LOAD_WORKFLOW SAVE_ITERATION_ANALYTICS_WORKFLOW
HOOK_WORKFLOW SAVE_ITERATION_ANALYTICS_WORKFLOW POST_SIMULATION

{% for item in config.ert.analysis %}
LOAD_WORKFLOW SAVE_ITERATION_ANALYTICS_WORKFLOW_{{ loop.index - 1 }}
HOOK_WORKFLOW SAVE_ITERATION_ANALYTICS_WORKFLOW_{{ loop.index - 1 }} POST_SIMULATION
{% endfor %}
{%- endif %}

GEN_KW FLOWNET_PARAMETERS ./EMPTYFILE ./EMPTYFILE ./parameters.ertparam
Expand Down

0 comments on commit f0d5db3

Please sign in to comment.