Issue20 (#21)
* Issue #18 - start of updating the data source for 2020 timeseries pits and some todos in the file

* new sources

* issue #18 working towards modified 2020 timeseries pits upload script

* path logic

* make sure to not use gap filled density at this point

* Issue #18 - file for 2021 timeseries pits

* Issue #18 no perimeter depth files for 2021 TS pits

* having issues creating the test database

* Modify create script for sqlalchemy>2.0

* Switch to 2020 V1 pits - there are some data format and header issues in the V2 data

* Use db_session function

* Slight tweaks to 2021 timeseries script

* Script to delete pits

* start using insitupy for metadata handling

* working through handling metadata

* 2020 V2 data, allow split header line logic. ALSO - use the non-gap-filled density because the gap filled density files break the logic as they don't show the profile at all

* get rid of spaces in flags

* Script for 2021 pits is working

* start working on SWE files for pits

* move towards row based SRID and timezone ability

* bulk swe property upload script working

* start script to add in met timeseries data

* Script working to upload met data for GM to the database

* Issue #20 - bump insitupy for new variables

* adjustments to variable mapping

* bump insitupy for more depth mappings

* Remove Python 3.7 compatibility

* fixing reqs in build

* Fixing tests and build. SMP profile depths were not inverted

* Repeat script here
micah-prime authored Aug 15, 2024
1 parent 3b67a3c commit 99c9f58
Showing 5 changed files with 175 additions and 3 deletions.
3 changes: 2 additions & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
wheel>0.34.0, <0.35.0
snowexsql>=0.4.1, <0.5.0
snowexsql>=0.4.1,<0.5.0
snowmicropyn
matplotlib>=3.2.2
moto==3.1.11
@@ -9,3 +9,4 @@ rasterio>=1.1.5
boto3>=1.23.7,<1.24
timezonefinder>=6.0,<7.0
insitupy==0.1.2
metloom==0.6.1
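
The new metloom pin provides the SnowExMetInfo station registry that the met upload script below iterates. A minimal sketch, using only the attributes that script relies on (illustration only, not part of this commit):

from metloom.pointdata.snowex import SnowExMetInfo

# List the SnowEx met stations metloom knows about, with the attributes
# add_met_timeseries.py uses: station_id, latitude, longitude, path.
for stn in SnowExMetInfo:
    print(stn.station_id, stn.latitude, stn.longitude, stn.path)
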
1 change: 1 addition & 0 deletions scripts/download/nsidc_sources.txt
@@ -8,3 +8,4 @@ https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_UNM_GPR.001/2020.01.28/SNEX20_UNM_G
https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_SD_TLI.001/2019.09.29/SNEX20_SD_TLI_clean.csv
https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX20_TS_SP.002/
https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX21_TS_SP.001/
https://n5eil01u.ecs.nsidc.org/SNOWEX/SNEX_Met.001/
152 changes: 152 additions & 0 deletions scripts/upload/add_met_timeseries.py
@@ -0,0 +1,152 @@
"""
Uploads SnowEx temporary met stations to the database
Source: https://nsidc.org/data/snex_met/versions/1
User guide: https://nsidc.org/sites/default/files/documents/user-guide/snex_met-v001-userguide.pdf
1. Data must be downloaded via sh ../download/download_nsidc.sh
2A. python run.py # To run all upload scripts at once
2B. python add_met_timeseries.py # To run individually
"""

import glob
import time
from os.path import abspath, join
from metloom.pointdata.snowex import SnowExMetInfo

import pandas as pd
from snowexsql.db import get_db
from snowex_db.upload import *
from snowex_db import db_session


def main():
# Site name
start = time.time()
site_name = 'Grand Mesa'
timezone = 'MST'

# Path to the downloaded Grand Mesa met station data
base = abspath(join('../download/data/SNOWEX/SNEX_Met.001/'))

# Start the Database
db_name = 'localhost/test'

# Variables we will use
variable_unit_map = {
"RH_10ft": {
"units": "percent",
"notes": "Relative humidity measured at 10 ft tower level",
"instrument": "Campbell Scientific HC2S3"
},
# "RH_20ft": "percent",
"BP_kPa_Avg": {
"units": "kPa",
"notes": "Barometric pressure",
"instrument": "Campbell Scientific CS106",
},
# "AirTC_20ft_Avg": "degrees Celcius",
"AirTC_10ft_Avg": {
"units": "degrees Celcius",
"notes": "Air temperature measured at 10 ft tower level",
"instrument": "Campbell Scientific HC2S3"
},
# "WSms_20ft_Avg": "m/s",
"WSms_10ft_Avg": {
"units": "m/s",
"notes": "Vector mean wind speed measured at 10 ft tower level",
"instrument": "R.M. Young 05103",
},
"WindDir_10ft_D1_WVT": {
"units": "degrees",
"notes": "Vector mean wind direction measured at 10 ft tower level",
"instrument": "R.M. Young 05103",
},
# "WindDir_20ft_D1_WVT": "degrees",
"SUp_Avg": {
"units": "W/m^2",
"notes": "Shortwave radiation measured with upward-facing sensor",
"instrument": "Kipp and Zonnen CNR4",
},
"SDn_Avg": {
"units": "W/m^2",
"notes": "Shortwave radiation measured with downward-facing sensor",
"instrument": "Kipp and Zonnen CNR4",
},
"LUpCo_Avg": {
"units": "W/m^2",
"notes": "Longwave radiation measured with upward-facing sensor",
"instrument": "Kipp and Zonnen CNR4",
},
"LDnCo_Avg": {
"units": "W/m^2",
"notes": "Longwave radiation measured with downward-facing sensor",
"instrument": "Kipp and Zonnen CNR4",
},
# "SM_5cm_Avg": None,
"SM_20cm_Avg": {
"units": None,
"notes": "Soil moisture measured at 10 cm below the soil",
"instrument": "Stevens Water Hydraprobe II",
},
# "SM_50cm_Avg": None,
# "TC_5cm_Avg": "degrees Celcius",
"TC_20cm_Avg": {
"units": "degrees Celcius",
"notes": "Soil temperature measured at 10 cm below the soil",
"instrument": "Stevens Water Hydraprobe II",
},
# "TC_50cm_Avg": "degrees Celcius",
# "DistanceSensToGnd(m)",
"SnowDepthFilter(m)": {
"units": "m",
"notes": "Temperature corrected, derived snow surface height (filtered)",
"instrument": "Campbell Scientific SR50A",
},
}

errors = 0
with db_session(
db_name, credentials='credentials.json'
) as (session, engine):

for stn_obj in SnowExMetInfo:
f = join(base, stn_obj.path)
# Read in the file
df = pd.read_csv(f)
# add location info
df["latitude"] = [stn_obj.latitude] * len(df)
df["longitude"] = [stn_obj.longitude] * len(df)
df = df.set_index("TIMESTAMP")
# SITE ID - use station id
df["site"] = [stn_obj.station_id] * len(df)
df["observer"] = ["P. Houser"] * len(df)

# Split variables into their own files
for v, info in variable_unit_map.items():
unit = info["units"]

df_cut = df.loc[
:, [v, "latitude", "longitude", "site"]
]
df_cut["instrument"] = [info["instrument"]] * len(df_cut)

new_f = f.replace(".csv", f"local_mod_{v}.csv")
df_cut.to_csv(new_f, index_label="datetime")
csv = PointDataCSV(
new_f,
depth_is_metadata=False,
units=unit,
site_name=site_name,
in_timezone=timezone,
epsg=26912,
doi="https://doi.org/10.5067/497NQVJ0CBEX")

csv.submit(session)
errors += len(csv.errors)

return errors


if __name__ == '__main__':
main()
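
To spot-check the upload, the point data can be read back with snowexsql. A minimal sketch, assuming the PointData table and get_db helper from snowexsql expose the type, value, and date columns as in the project's other examples (illustration only, not part of this commit):

from snowexsql.db import get_db
from snowexsql.data import PointData

# Connect to the same database the upload targets and read back a few of the
# newly added 10 ft air temperature records to confirm the upload worked.
engine, session = get_db('localhost/test', credentials='credentials.json')
qry = session.query(PointData.date, PointData.value)
qry = qry.filter(PointData.type == 'air_temp_10ft')
print(qry.limit(5).all())
session.close()
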
1 change: 1 addition & 0 deletions scripts/upload/add_time_series_pits_2020.py
@@ -22,6 +22,7 @@ def main():
Add 2020 timeseries pits
"""
db_name = 'localhost/snowex'

# Version 2 DOI
# https://nsidc.org/data/snex20_ts_sp/versions/2
doi = "https://doi.org/10.5067/KZ43HVLZV6G4"
21 changes: 19 additions & 2 deletions snowex_db/metadata.py
@@ -434,7 +434,19 @@ class DataHeader(object):
'depth_m': 'depth',
'date_dd_mmm_yy': 'date',
'time_gmt': 'time',
'elev_m': 'elevation'
'elev_m': 'elevation',
'rh_10ft': 'relative_humidity_10ft',
'bp_kpa_avg': 'barometric_pressure',
'airtc_10ft_avg': 'air_temp_10ft',
'wsms_10ft_avg': 'wind_speed_10ft',
'winddir_10ft_d1_wvt': 'wind_direction_10ft',
'sup_avg': 'incoming_shortwave',
'sdn_avg': 'outgoing_shortwave',
'lupco_avg': 'incoming_longwave',
'ldnco_avg': 'outgoing_longwave',
'sm_20cm_avg': 'soil_moisture_20cm',
'tc_20cm_avg': 'soil_temp_20cm',
'snowdepthfilter(m)': 'depth'
}

# Known possible profile types anything not in here will throw an error
@@ -444,6 +456,10 @@ class DataHeader(object):
'specific_surface_area', 'equivalent_diameter',
'grain_size', 'hand_hardness', 'grain_type',
'manual_wetness', 'two_way_travel', 'depth', 'swe',
'relative_humidity_10ft', 'barometric_pressure',
'air_temp_10ft', 'wind_speed_10ft', 'wind_direction_10ft',
'incoming_shortwave', 'outgoing_shortwave', 'incoming_longwave',
'outgoing_longwave', 'soil_moisture_20cm', 'soil_temp_20cm',
'snow_void'
]

@@ -474,6 +490,7 @@ def __init__(self, filename, **kwargs):
kwargs: keyword values to pass to the database as metadata
"""
self.log = get_logger(__name__)
self._fname = filename

self.extra_header = assign_default_kwargs(
self, kwargs, self.defaults, leave=['epsg'])
@@ -509,7 +526,7 @@ def __init__(self, filename, **kwargs):

def submit(self, session):
"""
Submit meta data to the database as site info, Do not use on profile
Submit metadata to the database as site info, Do not use on profile
headers. Only use on site_details files.
Args:
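
The rename entries added above let DataHeader translate raw met tower column headers into canonical database names. A standalone sketch of that lowercase-then-map step, using a hypothetical subset of the dictionary from the diff (illustration only, not part of this commit):

# Hypothetical subset of the header rename map added in metadata.py.
rename = {
    'rh_10ft': 'relative_humidity_10ft',
    'airtc_10ft_avg': 'air_temp_10ft',
    'snowdepthfilter(m)': 'depth',
}

raw_header = ['TIMESTAMP', 'RH_10ft', 'AirTC_10ft_Avg', 'SnowDepthFilter(m)']
# Lowercase each column name, then substitute the canonical name when one exists.
canonical = [rename.get(col.lower(), col.lower()) for col in raw_header]
print(canonical)  # ['timestamp', 'relative_humidity_10ft', 'air_temp_10ft', 'depth']
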
