#!/usr/bin/env python
"""
Parse sodar data and assert what data and info goes into
creating and updating monthly netcdf files.

>> (parse, create, update) = load_processors('proc_rdi_rawdata_sodar')
>> data = parse(lines)
>> create(platform_info, sensor_info, data)
>> update(platform_info, sensor_info, data)
"""

from sodar import rawData
import numpy as n
from datetime import datetime, timedelta
from procutil import scanf_datetime, dt2es

# NOTE(review): creator()/updater() also reference NC, pycdfVersion and
# pycdfArrayPkg, which are not defined anywhere in this file -- presumably
# supplied by a star-import (e.g. pycdf/ncutil) in the original build.
# TODO confirm and import them explicitly.


def _nan_array(shape, dtype):
    """Return an array of `shape` initialized to NaN.

    Multiplying the ones-array by n.nan promotes integer dtypes to float,
    matching the original idiom n.array(n.ones(shape, dtype) * n.nan).
    """
    return n.array(n.ones(shape, dtype=dtype) * n.nan)


def parser(platform_info, sensor_info, lines):
    """
    Parse and assign wind profile data from raw Sodar file.

    :param platform_info: dict of platform metadata (unused here).
    :param sensor_info: dict; reads 'num_altitudes' and 'utc_offset' (hours).
    :param lines: list of raw text lines from the sodar data file.
    :returns: dict of preallocated NaN arrays with 'dt' (datetime objects)
              and 'es' (epoch seconds) filled in, one entry per interval.
    """
    # was: rawdata.RawData(...) -- NameError; the module is imported as rawData
    rawDataObject = rawData.RawData('\n'.join(lines))

    numIntervals = len(rawDataObject)
    # was: sensor_info[num_altitudes] -- missing quotes (NameError)
    numAltitudes = sensor_info['num_altitudes']

    # per-interval scalar diagnostics, all initialized to NaN
    # (was dtype=long for 'es' -- Python 2 only; int is equivalent here)
    int_fields = ('es',
                  'val1', 'val2', 'val3', 'val4',
                  'spu1', 'spu2', 'spu3', 'spu4',
                  'nois1', 'nois2', 'nois3', 'nois4',
                  'femax', 'softw',
                  'fe11', 'fe12', 'fe21', 'fe22',
                  'snr1', 'snr2', 'snr3', 'snr4',
                  'jam')

    data = {
        'dt': _nan_array((numIntervals,), object),
        'z': _nan_array((numAltitudes,), float),
        'u': _nan_array((numIntervals, numAltitudes), float),
        'v': _nan_array((numIntervals, numAltitudes), float),
        'w': _nan_array((numIntervals, numAltitudes), float),
        'echo': _nan_array((numIntervals, numAltitudes), int),
    }
    for name in int_fields:
        data[name] = _nan_array((numIntervals,), int)

    # was: i = rawDataObject.index(sample) -- O(n^2) and wrong when two
    # samples compare equal; enumerate gives the true position.
    for i, sample in enumerate(rawDataObject):
        dt = {'month': int(sample['MONTH']),
              'day': int(sample['DAY']),
              'year': int(sample['YEAR']),
              'hour': int(sample['HOUR']),
              'min': int(sample['MIN']),
              }
        # was: '... %(hour)02d:%02d(min)' -- malformed mapping spec for the
        # minutes field; the format below matches fmt='%m-%d-%Y %H:%M'
        dt = '%(month)02d-%(day)02d-%(year)04d %(hour)02d:%(min)02d' % dt
        dt = scanf_datetime(dt, fmt='%m-%d-%Y %H:%M')
        if sensor_info['utc_offset']:
            dt = dt + timedelta(hours=sensor_info['utc_offset'])
        data['dt'][i] = dt
        data['es'][i] = dt2es(dt)

    return data


def creator(platform_info, sensor_info, data):
    """Build netcdf creation info for a new monthly file.

    :returns: tuple (global_atts, dimensions, var_inits, var_data).
    """
    title_str = sensor_info['description'] + ' at ' + platform_info['location']
    # was: `now` undefined (NameError) -- timestamp for release/creation/
    # modification attributes; TODO confirm UTC is the intended reference
    now = datetime.utcnow()

    global_atts = {
        'title': title_str,
        # was: 'Unversity' (typo)
        'institution': 'University of North Carolina at Chapel Hill (UNC-CH)',
        'institution_url': 'http://nccoos.unc.edu',
        'institution_dods_url': 'http://nccoos.unc.edu',
        'metadata_url': 'http://nccoos.unc.edu',
        'references': 'http://nccoos.unc.edu',
        'contact': 'Sara Haines (haines@email.unc.edu)',
        #
        'source': 'fixed-profiler (acoustic doppler) observation',
        'history': 'Data processed by NCCOOS',
        'comment': 'File created using pycdf' + pycdfVersion() +
                   ' and numpy ' + pycdfArrayPkg(),
        # conventions
        'Conventions': 'CF-1.0; SEACOOS-CDL-v2.0',
        # SEACOOS CDL codes
        'format_category_code': 'fixed-profiler',
        # was: platform_info['instituion'] (key typo)
        'institution_code': platform_info['institution'],
        'platform_code': platform_info['id'],
        'package_code': sensor_info['id'],
        # institution specific
        'project': 'North Carolina Coastal Ocean Observing System (NCCOOS)',
        'project_url': 'http://nccoos.unc.edu',
        # timeframe of data contained in file yyyy-mm-dd HH:MM:SS
        # was: data['sample_dt'] -- parser stores timestamps under 'dt';
        # first element is the start, last is the end of the record
        'start_date': data['dt'][0].strftime("%Y-%m-%d %H:%M:%S"),
        'end_date': data['dt'][-1].strftime("%Y-%m-%d %H:%M:%S"),
        'release_date': now.strftime("%Y-%m-%d %H:%M:%S"),
        #
        'creation_date': now.strftime("%Y-%m-%d %H:%M:%S"),
        'modification_date': now.strftime("%Y-%m-%d %H:%M:%S"),
        'process_level': 'level1',
        #
        # must type match to data (e.g. fillvalue is real if data is real)
        '_FillValue': -99999.,
    }

    var_atts = {
        # coordinate variables
        'time': {'short_name': 'time',
                 'long_name': 'Time',
                 'standard_name': 'time',
                 'units': 'seconds since 1970-1-1 00:00:00 -0',  # UTC
                 'axis': 'T',
                 },
        'lat': {'short_name': 'lat',
                'long_name': 'Latitude',
                'standard_name': 'latitude',
                'reference': 'geographic coordinates',
                'units': 'degrees_north',
                'valid_range': (-90., 90.),
                'axis': 'Y',
                },
        # was: 'Longtitude'/'longtitude' (typos) and axis 'Y' -- per CF,
        # longitude is the X axis
        'lon': {'short_name': 'lon',
                'long_name': 'Longitude',
                'standard_name': 'longitude',
                'reference': 'geographic coordinates',
                'units': 'degrees_east',
                'valid_range': (-180., 180.),
                'axis': 'X',
                },
        'z': {'short_name': 'z',
              'long_name': 'Height',
              'standard_name': 'height',
              'reference': 'zero at sea-surface',
              'units': 'm',
              'axis': 'Z',
              },
        # data variables
        'u': {'long_name': 'East/West Component of Current',
              'standard_name': 'eastward_current',
              'units': 'm s-1',
              'reference': 'clockwise from True East',
              },
        'v': {'long_name': 'North/South Component of Current',
              'standard_name': 'northward_current',
              'units': 'm s-1',
              'reference': 'clockwise from True North',
              },
        'w': {'long_name': 'Upward/Downward Component of Current',
              'standard_name': 'upward_current',
              'units': 'm s-1',
              'positive': 'up',
              },
        'back_scatter': {'long_name': 'Backscatter',
                         'standard_name': 'back_scatter',
                         'units': 'decibels',
                         },
        'wtemp': {'long_name': 'Water Temperature',
                  'standard_name': 'water_temperature',
                  'units': 'degrees Celsius',
                  },
    }

    # integer values
    ntime = NC.UNLIMITED
    nlat = 1
    nlon = 1
    # NOTE(review): parser() sizes its arrays from sensor_info
    # ['num_altitudes'], not 'nbins' -- TODO confirm both keys exist and agree
    nz = sensor_info['nbins']

    # dimension names use tuple so order of initialization is maintained
    dimensions = ('ntime', 'nlat', 'nlon', 'nz')

    # using tuple of tuples so order of initialization is maintained
    # using dict for attributes order of init not important
    # use dimension names not values
    # (varName, varType, (dimName1, [dimName2], ...))
    var_inits = (
        # coordinate variables
        ('time', NC.INT, ('ntime',)),
        ('lat', NC.FLOAT, ('nlat',)),
        ('lon', NC.FLOAT, ('nlon',)),
        ('z', NC.FLOAT, ('nz',)),
        # data variables
        ('u', NC.FLOAT, ('ntime', 'nz')),
        ('v', NC.FLOAT, ('ntime', 'nz')),
        ('w', NC.FLOAT, ('ntime', 'nz')),
        ('back_scatter', NC.FLOAT, ('ntime', 'nz')),
        ('wtemp', NC.FLOAT, ('ntime',)),
    )

    # var data
    var_data = (
        ('lat', platform_info['lat']),
        ('lon', platform_info['lon']),
        ('z', []),
        ('u', []),
        ('v', []),
        ('w', []),
        ('back_scatter', []),
        ('wtemp', []),
    )

    return (global_atts, dimensions, var_inits, var_data)


def updater(platform_info, sensor_info, data):
    """Build netcdf update info for appending to an existing monthly file.

    :returns: tuple (global_atts, var_data).
    """
    # was: `now` undefined (NameError) -- see creator()
    now = datetime.utcnow()

    global_atts = {
        # timeframe of data contained in file yyyy-mm-dd HH:MM:SS
        # was: data['sample_dt'] -- parser stores timestamps under 'dt'
        'end_date': data['dt'][-1].strftime("%Y-%m-%d %H:%M:%S"),
        'release_date': now.strftime("%Y-%m-%d %H:%M:%S"),
        #
        'creation_date': now.strftime("%Y-%m-%d %H:%M:%S"),
        'modification_date': now.strftime("%Y-%m-%d %H:%M:%S"),
    }

    # NOTE(review): parser() fills 'u', 'v', 'w' and 'echo' but never
    # 'back_scatter' or 'wtemp' -- the last two lookups below will KeyError;
    # this looks copied from an ADCP processor. TODO reconcile with parser().
    var_data = (
        ('u', data['u']),
        ('v', data['v']),
        ('w', data['w']),
        ('back_scatter', data['back_scatter']),
        ('wtemp', data['wtemp']),
    )

    return (global_atts, var_data)