Temporary s3,s4 mappings fix + automatic s3 prec time period
RoryPTB committed Feb 14, 2024
1 parent 1c9dcb2 commit 0004cc8
Showing 6 changed files with 48 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
python-version: [3.10, 3.11, 3.12]
env:
BUFR_ORIGINATING_CENTRE: 123
BUFR_ORIGINATING_SUBCENTRE: 123
2 changes: 2 additions & 0 deletions .gitignore
@@ -44,6 +44,8 @@ logs
.vscode/settings.json
# Ignore decoded CSV files
decoded_*.csv
# Ignore bash scripts in data folder
data/*.sh

# pycharm
.idea
4 changes: 2 additions & 2 deletions Dockerfile
@@ -1,4 +1,4 @@
FROM wmoim/dim_eccodes_baseimage:2.28.0
FROM wmoim/dim_eccodes_baseimage:2.31.0

ENV TZ="Etc/UTC" \
DEBIAN_FRONTEND="noninteractive" \
@@ -11,7 +11,7 @@ RUN echo "Acquire::Check-Valid-Until \"false\";\nAcquire::Check-Date \"false\";"
&& apt-get update -y \
&& apt-get install -y ${DEBIAN_PACKAGES} \
&& apt-get install -y python3 python3-pip libeccodes-tools \
&& pip3 install --no-cache-dir https://github.com/wmo-im/csv2bufr/archive/refs/tags/v0.7.4.zip \
&& pip3 install --no-cache-dir https://github.com/wmo-im/csv2bufr/archive/refs/tags/v0.8.0.zip \
&& pip3 install --no-cache-dir https://github.com/wmo-im/pymetdecoder/archive/refs/tags/v0.1.10.zip

# Environment variables
16 changes: 16 additions & 0 deletions data/reinstall.sh
@@ -0,0 +1,16 @@
#!/bin/bash

# Navigate to the pymetdecoder directory
cd /local/pymetdecoder

# Uninstall the pymetdecoder package
pip uninstall -y pymetdecoder

# Install the pymetdecoder package from the local setup.py file
python3 setup.py install

# Navigate to the data directory
cd /local/data

# Clear the terminal screen
clear
4 changes: 2 additions & 2 deletions requirements.txt
@@ -1,4 +1,4 @@
attrs==22.2.0
click==8.1.3
numpy==1.21.6
numpy==1.24.0
click
csv2bufr
41 changes: 25 additions & 16 deletions synop2bufr/__init__.py
@@ -1014,7 +1014,11 @@ def rad_convert(rad, time):
# The time period is expected to be in hours
output['ps3_time_period'] = -1 * decoded['precipitation_s3']['time_before_obs']['value'] # noqa
except Exception:
output['ps3_time_period'] = None
# Regional manual (1/12.11, 2/12.12, 3/12.10, etc.) states that
# the precipitation time period is 3 hours,
# or another period required for regional exchange.
# This means that if tR is not given, it is assumed to be 3 hours.
output['ps3_time_period'] = -3

# Precipitation indicator iR is needed to determine whether the
# section 1 and section 3 precipitation groups are missing because there
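For context, a minimal sketch (not part of the commit) of how the new fallback behaves. The helper name and the sample decoded dicts below are illustrative, loosely mirroring the pymetdecoder output used in the hunk above:

# Illustrative sketch, not part of the commit: the section 3 precipitation
# time-period fallback introduced above. The dict shape loosely mirrors
# pymetdecoder output; the helper name and sample values are made up.
def ps3_time_period(decoded: dict) -> int:
    """Return the s3 precipitation time period in hours (negative,
    i.e. counted back from the observation time)."""
    try:
        # tR reported: use the decoded time before observation
        return -1 * decoded['precipitation_s3']['time_before_obs']['value']
    except Exception:
        # tR not reported: assume the 3 hour period required by the
        # regional manuals (1/12.11, 2/12.12, 3/12.10, etc.)
        return -3

print(ps3_time_period({'precipitation_s3': {'time_before_obs': {'value': 6}}}))  # -6
print(ps3_time_period({'precipitation_s3': {}}))                                 # -3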
@@ -1580,13 +1584,13 @@ def update_data_mapping(mapping: list, update: dict):
# - verticalSignificance: used 7 times (for N,
# low-high cloud amount, low-high cloud drift)
s3_mappings = [
{"eccodes_key":
f"#{idx+8}#verticalSignificanceSurfaceObservations", # noqa
"value": f"data:vs_s3_{idx+1}"},
{"eccodes_key": f"#{idx+3}#cloudAmount",
"value": f"data:cloud_amount_s3_{idx+1}",
"valid_min": "const:0",
"valid_max": "const:8"},
# {"eccodes_key":
# f"#{idx+8}#verticalSignificanceSurfaceObservations", # noqa
# "value": f"data:vs_s3_{idx+1}"},
# {"eccodes_key": f"#{idx+3}#cloudAmount",
# "value": f"data:cloud_amount_s3_{idx+1}",
# "valid_min": "const:0",
# "valid_max": "const:8"},
{"eccodes_key": f"#{idx+5}#cloudType",
"value": f"data:cloud_genus_s3_{idx+1}"},
{"eccodes_key": f"#{idx+2}#heightOfBaseOfCloud",
@@ -1615,14 +1619,14 @@ def update_data_mapping(mapping: list, update: dict):
# NOTE: Some of the ecCodes keys are used in
# the above, so we must add 'num_s3_clouds'
s4_mappings = [
{"eccodes_key":
f"#{idx+num_s3_clouds+8}#verticalSignificanceSurfaceObservations", # noqa
"value": f"const:{vs_s4}"},
{"eccodes_key":
f"#{idx+num_s3_clouds+3}#cloudAmount",
"value": f"data:cloud_amount_s4_{idx+1}",
"valid_min": "const:0",
"valid_max": "const:8"},
# {"eccodes_key":
# f"#{idx+num_s3_clouds+8}#verticalSignificanceSurfaceObservations", # noqa
# "value": f"const:{vs_s4}"},
# {"eccodes_key":
# f"#{idx+num_s3_clouds+3}#cloudAmount",
# "value": f"data:cloud_amount_s4_{idx+1}",
# "valid_min": "const:0",
# "valid_max": "const:8"},
{"eccodes_key":
f"#{idx+num_s3_clouds+5}#cloudType",
"value": f"data:cloud_genus_s4_{idx+1}"},
@@ -1636,6 +1640,11 @@
for m in s4_mappings:
mapping['data'] = update_data_mapping(
mapping=mapping['data'], update=m)
# Now section 3 and 4 cloud groups have been
# added to the mapping file, write the file
# for debugging purposes
with open('updated_mappings.json', 'w') as f:
json.dump(mapping, f, indent=2)
except Exception as e:
LOGGER.error(e)
LOGGER.error(f"Missing station height for station {tsi}")
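As a debugging aid, the file written in the hunk above can be read back to check which ecCodes cloud keys remain for the extra section 3/4 cloud groups after the temporary fix. This is a sketch, not part of the commit; the file name follows the diff, but the filtering criteria are assumptions:

# Illustrative sketch, not part of the commit: read back the debug file
# written above and list the ecCodes cloud keys that survive the
# temporary s3/s4 fix.
import json

with open('updated_mappings.json') as f:
    mapping = json.load(f)

for entry in mapping.get('data', []):
    key = entry.get('eccodes_key', '')
    if 'cloudType' in key or 'heightOfBaseOfCloud' in key:
        print(key, '->', entry.get('value'))

# With the verticalSignificance and cloudAmount mappings commented out,
# no cloudAmount keys should appear for the extra s3/s4 cloud groups.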
