Changeset 320
- Timestamp:
- 04/27/10 11:55:31
- Files:
-
- raw2proc/trunk/raw2proc/billymitchell_config_20090623.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/bogue_config_20090915.py (added)
- raw2proc/trunk/raw2proc/crow_config_20050325.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/crow_config_20090122.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/dukeforest_config_20070514.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/hampton_config_20080930.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/ims_config_20070920.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/meet_config_20010510.py (modified) (3 diffs)
- raw2proc/trunk/raw2proc/meet_config_20040305.py (added)
- raw2proc/trunk/raw2proc/meet_config_20050324.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/meet_config_20090122.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/morgan_config_20080701.py (modified) (1 diff)
- raw2proc/trunk/raw2proc/ncutil.py (modified) (5 diffs)
- raw2proc/trunk/raw2proc/proc_avp_ysi_6600_v1_CDL2.py (modified) (8 diffs)
- raw2proc/trunk/raw2proc/proc_avp_ysi_6600_v2_CDL2.py (modified) (9 diffs)
- raw2proc/trunk/raw2proc/proc_cr1000_flow.py (modified) (3 diffs)
- raw2proc/trunk/raw2proc/proc_cr1000_wq.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/proc_cr10x_flow_v1.py (modified) (4 diffs)
- raw2proc/trunk/raw2proc/proc_cr10x_flow_v2.py (modified) (4 diffs)
- raw2proc/trunk/raw2proc/proc_cr10x_wq_v1.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/proc_cr10x_wq_v2.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/proc_jpier_ascii_met.py (modified) (4 diffs)
- raw2proc/trunk/raw2proc/proc_nortek_wds_dw.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/proc_nortek_wpa_adcp.py (modified) (3 diffs)
- raw2proc/trunk/raw2proc/procutil.py (modified) (5 diffs)
- raw2proc/trunk/raw2proc/raw2proc.py (modified) (2 diffs)
- raw2proc/trunk/raw2proc/spin_bogue_adcp.py (deleted)
- raw2proc/trunk/raw2proc/spin_bogue_adcpwaves.py (deleted)
- raw2proc/trunk/raw2proc/spin_crow_csv.py (deleted)
- raw2proc/trunk/raw2proc/spin_crow_flow.py (deleted)
- raw2proc/trunk/raw2proc/spin_crow_wq.py (deleted)
- raw2proc/trunk/raw2proc/spin_hampton_avp.py (deleted)
- raw2proc/trunk/raw2proc/spin_hampton_met.py (deleted)
- raw2proc/trunk/raw2proc/spin_jpier_adcp.py (deleted)
- raw2proc/trunk/raw2proc/spin_jpier_met.py (deleted)
- raw2proc/trunk/raw2proc/spin_lsrb_adcp.py (deleted)
- raw2proc/trunk/raw2proc/spin_meet_csv.py (deleted)
- raw2proc/trunk/raw2proc/spin_meet_flow.py (deleted)
- raw2proc/trunk/raw2proc/spin_meet_wq.py (deleted)
- raw2proc/trunk/raw2proc/spin_morgan_avp.py (deleted)
- raw2proc/trunk/raw2proc/spin_morgan_met.py (deleted)
- raw2proc/trunk/raw2proc/spin_stones_avp.py (deleted)
- raw2proc/trunk/raw2proc/spin_stones_met.py (deleted)
- raw2proc/trunk/raw2proc/split_cr1000_by_month.py (added)
- raw2proc/trunk/raw2proc/split_cr1000_this_month.py (added)
- raw2proc/trunk/raw2proc/split_cr10x_by_month.py (added)
- raw2proc/trunk/raw2proc/test_concat_platform_package_data.py (deleted)
- raw2proc/trunk/raw2proc/test_raw2proc.py (deleted)
Legend:
- Unmodified
- Added
- Removed
- Modified
- Copied
- Moved
raw2proc/trunk/raw2proc/billymitchell_config_20090623.py
r309 r320 24 24 'num_altitudes' : 39, 25 25 'sensor_elevation' : 0, # meters 26 'plot_module' : 'billymitchell_sodar_plot', 27 'plot_names' : ('timeseries', 'wind_vectors', 'wind_barbs'), 26 28 }, 27 29 } raw2proc/trunk/raw2proc/crow_config_20050325.py
r233 r320 24 24 'process_module' : 'proc_cr10x_flow_v2', 25 25 'utc_offset' : 4, # hours offset to utc 26 'press_offset' : 0./12., # pressure gauge offset to staff gauge 26 27 # 'nbins' : 69, 27 28 # 'bin_size' : 0.5, # meters raw2proc/trunk/raw2proc/crow_config_20090122.py
r233 r320 24 24 'process_module' : 'proc_cr1000_flow', 25 25 'utc_offset' : 4, # hours offset to utc 26 'press_offset' : 0./12., # pressure gauge offset to staff gauge 27 'plot_module' : 'crow_flow_plot', 28 'plot_names' : ('timeseries',), 26 29 # 'nbins' : 69, 27 30 # 'bin_size' : 0.5, # meters … … 36 39 'process_module' : 'proc_cr1000_wq', 37 40 'utc_offset' : 4, # hours offset to utc 41 'plot_module' : 'crow_wq_plot', 42 'plot_names' : ('timeseries',), 38 43 }, 39 44 } raw2proc/trunk/raw2proc/dukeforest_config_20070514.py
r292 r320 8 8 # 9 9 'config_start_date' : '2007-05-14 00:00:00', 10 'config_end_date' : None, # None or yyyy-mm-dd HH:MM:SS10 'config_end_date' : '2007-07-11 00:00:00', # None or yyyy-mm-dd HH:MM:SS 11 11 'packages' : ('pa0',), 12 12 } raw2proc/trunk/raw2proc/hampton_config_20080930.py
r233 r320 33 33 'utc_offset' : 5., # hours offset to Eastern Standard 34 34 'anemometer_height' : 2., # meters 35 'latest_dir' : '/seacoos/data/nccoos/latest_v2.0',36 'latest_vars' : ('time','lat','lon','z','u','v','wspd', 'wdir'),35 # 'latest_dir' : '/seacoos/data/nccoos/latest_v2.0', 36 # 'latest_vars' : ('time','lat','lon','z','u','v','wspd', 'wdir'), 37 37 }, 38 38 } raw2proc/trunk/raw2proc/ims_config_20070920.py
r292 r320 8 8 # 9 9 'config_start_date' : '2007-09-20 00:00:00', 10 'config_end_date' : None, # None or yyyy-mm-dd HH:MM:SS10 'config_end_date' : '2008-12-11 00:00:00', # None or yyyy-mm-dd HH:MM:SS 11 11 'packages' : ('pa0',), 12 12 } raw2proc/trunk/raw2proc/meet_config_20010510.py
r233 r320 13 13 # 14 14 'config_start_date' : '2001-05-01 00:00:00', 15 'config_end_date' : '2004- 12-31 00:00:00', # None or yyyy-mm-dd HH:MM:SS15 'config_end_date' : '2004-03-05 08:00:00', # None or yyyy-mm-dd HH:MM:SS 16 16 'packages' : ('flow', 'wq'), 17 17 } … … 19 19 'flow' : { 'id' : 'flow', 20 20 'description' : 'Stream Flow and Rain Data', 21 # flow and wq data originally on same sample period so not split 22 # during this configuration period so raw flow data in raw wq 21 23 'raw_dir' : '/seacoos/data/nccoos/level0/meet/wq/', 22 24 'raw_file_glob' : 'mow_wq_*', … … 24 26 'process_module' : 'proc_cr10x_flow_v1', 25 27 'utc_offset' : 4, # hours offset to utc 28 'press_offset' : 0./12., # pressure gauge offset to staff gauge 26 29 # 'nbins' : 69, 27 30 # 'bin_size' : 0.5, # meters raw2proc/trunk/raw2proc/meet_config_20050324.py
r233 r320 24 24 'process_module' : 'proc_cr10x_flow_v2', 25 25 'utc_offset' : 4, # hours offset to utc 26 'press_offset' : 0./12., # pressure gauge offset to staff gauge 26 27 # 'nbins' : 69, 27 28 # 'bin_size' : 0.5, # meters raw2proc/trunk/raw2proc/meet_config_20090122.py
r233 r320 24 24 'process_module' : 'proc_cr1000_flow', 25 25 'utc_offset' : 4, # hours offset to utc 26 # 'nbins' : 69, 27 # 'bin_size' : 0.5, # meters 28 # 'transducer_ht' : 0.5, # meters above the bottom 29 # 'blanking_ht' : 1.6, # meters above transducer 26 'press_offset' : 0./12., # pressure gauge offset to staff gauge 27 'plot_module' : 'meet_flow_plot', 28 'plot_names' : ('timeseries',), 29 'csv_dir' : '/seacoos/data/nccoos/latest_csv', 30 'csv_vars' : ('time', 'rain','press_flow','press_wl'), 30 31 }, 31 32 'wq' : {'id' : 'wq', 32 'description' : 'Water Quality', 33 'raw_dir' : '/seacoos/data/nccoos/level0/meet/wq/', 34 'raw_file_glob' : 'mow_wq_*', 35 'proc_dir' : '/seacoos/data/nccoos/level1/meet/wq', 36 'process_module' : 'proc_cr1000_wq', 37 'utc_offset' : 4, # hours offset to utc 38 }, 33 'description' : 'Water Quality', 34 'raw_dir' : '/seacoos/data/nccoos/level0/meet/wq/', 35 'raw_file_glob' : 'mow_wq_*', 36 'proc_dir' : '/seacoos/data/nccoos/level1/meet/wq', 37 'process_module' : 'proc_cr1000_wq', 38 'utc_offset' : 4, # hours offset to utc 39 'plot_module' : 'meet_wq_plot', 40 'plot_names' : ('timeseries',), 41 'csv_dir' : '/seacoos/data/nccoos/latest_csv', 42 'csv_vars' : ('time', 'wtemp','cond','do_sat', 'do_mg', 'ph', 'turb', 'battvolts'), 43 }, 39 44 } 40 45 raw2proc/trunk/raw2proc/morgan_config_20080701.py
r221 r320 12 12 'config_end_date' : None, # None or yyyy-mm-dd HH:MM:SS 13 13 'packages' : ('avp', 'met'), 14 14 } 15 15 16 16 sensor_info = { raw2proc/trunk/raw2proc/ncutil.py
r213 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <200 8-10-23 11:31:24haines>2 # Last modified: Time-stamp: <2009-12-30 15:45:27 haines> 3 3 """ 4 4 Create, update and load utilities for netcdf files … … 50 50 varName, varData = var 51 51 # print varName 52 # print varData 52 53 ncvar = nc.var(varName) 53 54 # e.g. lat = array(var_data['lat']) … … 199 200 200 201 202 def nc_file_check(fns): 203 """Check file or list of files to ensure it is a netcdf file 204 If it is not, remove a file or files from the list""" 205 if isinstance(fns, str): 206 try: 207 nc = CDF(fns) 208 nc.close() 209 new_fns = fns 210 except CDFError, msg: 211 print "CDFError:", msg, fns 212 new_fns = None 213 214 else: 215 new_fns = [] 216 for fn in fns: 217 try: 218 nc = CDF(fn) 219 nc.close() 220 new_fns.append(fn) 221 except CDFError, msg: 222 print "CDFError:", msg, fn 223 224 return tuple(new_fns) 225 226 201 227 def nc_load(ncFile, varsLoad='all', nameType='variable_name', 202 228 ga_flag=True, va_flag=True): … … 205 231 206 232 :Parameters: 207 ncFile : string 233 ncFile : string or list of strings 208 234 Path and name of file to load 235 If list, then CDFMF 209 236 210 237 :Other Parameters: … … 229 256 230 257 """ 231 try: 232 nc = CDF(ncFile, NC.NOWRITE) 258 259 try: 260 if isinstance(ncFile, str): 261 # if only one file and it is a string 262 nc = CDF(ncFile) 263 else: 264 # if multiple filenames 265 nc = CDFMF(tuple(set(ncFile))) 233 266 234 267 ncdims = nc.dimensions(full=1) raw2proc/trunk/raw2proc/proc_avp_ysi_6600_v1_CDL2.py
r219 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 01-08 19:45:28haines>2 # Last modified: Time-stamp: <2009-12-16 16:33:00 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 133 133 'z' : numpy.array(numpy.ones((N,nbins), dtype=float)*numpy.nan), 134 134 # 135 'ysi_sn' : numpy.array(['' for i in range(N)] , dtype='|S20'),136 'ysi_id' : numpy.array(['' for i in range(N)] , dtype='|S20'),135 # 'ysi_sn' : numpy.array(['' for i in range(N)] , dtype='|S20'), 136 # 'ysi_id' : numpy.array(['' for i in range(N)] , dtype='|S20'), 137 137 # 138 138 'stime' : numpy.array(numpy.ones((N,nbins), dtype=long)*numpy.nan), … … 182 182 else: 183 183 profile_dt = scanf_datetime(profile_str, fmt='%m-%d-%Y %H:%M:%S') 184 elif re.search("Profile Location:", line , re.IGNORECASE):184 elif re.search("Profile Location:", line): 185 185 have_location = True 186 186 # profile location: P180, Instrument Serial No: 0001119E 187 187 # Profile Location: Hampton Shoal Serial No: 000109DD, ID: Delta 188 188 sw = re.findall(r'\w+:\s(\w+)*', line) 189 if len(sw)>=2: ysi_sn = sw[1] 190 else: ysi_sn = 'not known' 191 if len(sw)>=3: ysi_id = sw[2] 192 else: ysi_id = 'not known' 189 # ysi_sn = sw[1] 190 # ysi_id = sw[2] 193 191 194 192 # initialize for new profile at zero for averaging samples within each bin … … 207 205 have_head = head.all() 208 206 207 elif re.search("Error", line): 208 # ignore this line 209 if verbose: 210 print 'skipping bad data line ... 
' + str(line) 211 continue 212 209 213 elif (len(ysi)==13 and have_head): 210 214 if j>=nbins: … … 239 243 data['dt'][i] = profile_dt # profile datetime 240 244 data['time'][i] = dt2es(profile_dt) # profile time in epoch seconds 241 data['ysi_sn'][i] = ysi_sn242 data['ysi_id'][i] = ysi_id245 # data['ysi_sn'][i] = ysi_sn 246 # data['ysi_id'][i] = ysi_id 243 247 # 244 248 data['stime'][i] = stime # sample time in epoch seconds … … 345 349 'units': 'seconds since 1970-1-1 00:00:00 -0', # UTC 346 350 }, 347 'ysi_id' : {'short_name':'ysi_id',348 'long_name':'Identification name of YSI Sonde',349 'standard_name': 'identification_name'350 },351 'ysi_sn' : {'short_name':'ysi_sn',352 'long_name':'Serial number of YSI Sonde',353 'standard_name': 'serial_number'354 },351 # 'ysi_id' : {'short_name':'ysi_id', 352 # 'long_name':'Identification name of YSI Sonde', 353 # 'standard_name': 'identification_name' 354 # }, 355 # 'ysi_sn' : {'short_name':'ysi_sn', 356 # 'long_name':'Serial number of YSI Sonde', 357 # 'standard_name': 'serial_number' 358 # }, 355 359 'wtemp': {'short_name': 'wtemp', 356 360 'long_name': 'Water Temperature', 357 361 'standard_name': 'water_temperature', 358 'units': 'degrees 362 'units': 'degrees_Celsius', 359 363 }, 360 364 'cond': {'short_name': 'cond', … … 391 395 ('lon', 1), 392 396 ('z', sensor_info['nbins']), 393 ('nchar', 20),394 397 ) 395 398 … … 407 410 # ('ysi_sn', NC.CHAR, ('time', 'nchar')), 408 411 # ('ysi_id', NC.CHAR, ('time', 'nchar')), 409 ('stime', NC. INT, ('time', 'z')),412 ('stime', NC.FLOAT, ('time', 'z')), 410 413 ('wtemp', NC.FLOAT, ('time', 'z')), 411 414 ('cond', NC.FLOAT, ('time', 'z')), raw2proc/trunk/raw2proc/proc_avp_ysi_6600_v2_CDL2.py
r221 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 01-09 12:07:49haines>2 # Last modified: Time-stamp: <2009-12-16 15:23:36 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 125 125 'wd' : numpy.array(numpy.ones((N,), dtype=long)*numpy.nan), 126 126 'wl' : numpy.array(numpy.ones((N,), dtype=long)*numpy.nan), 127 'ysi_sn' : numpy.array(['' for i in range(N)] , dtype='|S20'),128 'ysi_id' : numpy.array(['' for i in range(N)] , dtype='|S20'),127 # 'ysi_sn' : numpy.array(['' for i in range(N)] , dtype='|S20'), 128 # 'ysi_id' : numpy.array(['' for i in range(N)] , dtype='|S20'), 129 129 # 130 130 'stime' : numpy.array(numpy.ones((N,nbins), dtype=long)*numpy.nan), … … 179 179 # Profile Location: Stones Bay Serial No: 00016B79, ID: AVP1_SERDP 180 180 sw = re.findall(r'\w+:\s(\w+)*', line) 181 ysi_sn = sw[1]182 ysi_id = sw[2]181 # ysi_sn = sw[1] 182 # ysi_id = sw[2] 183 183 # initialize for new profile at zero for averaging samples within each bin 184 184 wtemp = numpy.array(numpy.ones(nbins,), dtype=float)*numpy.nan … … 205 205 # get sample datetime from data 206 206 sample_str = '%02d-%02d-%02d %02d:%02d:%02d' % tuple(ysi[0:6]) 207 if sensor_info['utc_offset']: 208 sample_dt = scanf_datetime(sample_str, fmt='%m-%d-%y %H:%M:%S') + \ 209 timedelta(hours=sensor_info['utc_offset']) 210 else: 207 # month, day, year 208 try: 211 209 sample_dt = scanf_datetime(sample_str, fmt='%m-%d-%y %H:%M:%S') 212 210 except ValueError: 211 # day, month, year (month and day switched in some cases) 212 try: 213 sample_dt = scanf_datetime(sample_str, fmt='%d-%m-%y %H:%M:%S') 214 except: 215 sample_dt = datetime(1970,1,1) 216 217 if sensor_info['utc_offset']: 218 sample_dt = sample_dt + timedelta(hours=sensor_info['utc_offset']) 219 213 220 if j<nbins: 214 221 stime[j] = dt2es(sample_dt) # sample time … … 223 230 chl[j] = ysi[12] # chlorophyll (ug/l) 224 231 do[j] = ysi[13] # dissolved oxygen (mg/l) 225 232 226 233 j = j+1 227 234 … … 232 
239 data['wd'][i] = -1.*wd 233 240 data['wl'][i] = platform_info['mean_water_depth'] - (-1*wd) 234 data['ysi_sn'][i] = ysi_sn235 data['ysi_id'][i] = ysi_id241 # data['ysi_sn'][i] = ysi_sn 242 # data['ysi_id'][i] = ysi_id 236 243 237 244 data['stime'][i] = stime # sample time in epoch seconds … … 356 363 'units': 'm', 357 364 }, 358 'ysi_id' : {'short_name':'ysi_id',359 'long_name':'Identification name of YSI Sonde',360 'standard_name': 'identification_name'361 },362 'ysi_sn' : {'short_name':'ysi_sn',363 'long_name':'Serial number of YSI Sonde',364 'standard_name': 'serial_number'365 },365 # 'ysi_id' : {'short_name':'ysi_id', 366 # 'long_name':'Identification name of YSI Sonde', 367 # 'standard_name': 'identification_name' 368 # }, 369 # 'ysi_sn' : {'short_name':'ysi_sn', 370 # 'long_name':'Serial number of YSI Sonde', 371 # 'standard_name': 'serial_number' 372 # }, 366 373 'wtemp': {'short_name': 'wtemp', 367 374 'long_name': 'Water Temperature', 368 375 'standard_name': 'water_temperature', 369 'units': 'degrees 376 'units': 'degrees_Celsius', 370 377 }, 371 378 'cond': {'short_name': 'cond', … … 407 414 ('lon', 1), 408 415 ('z', sensor_info['nbins']), 409 ('nchar', 20),410 416 ) 411 417 … … 425 431 # ('ysi_sn', NC.CHAR, ('time', 'nchar')), 426 432 # ('ysi_id', NC.CHAR, ('time', 'nchar')), 427 ('stime', NC. INT, ('time', 'z')),433 ('stime', NC.FLOAT, ('time', 'z')), 428 434 ('wtemp', NC.FLOAT, ('time', 'z')), 429 435 ('cond', NC.FLOAT, ('time', 'z')), raw2proc/trunk/raw2proc/proc_cr1000_flow.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 06-25 15:12:53haines>2 # Last modified: Time-stamp: <2009-12-07 14:32:39 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 74 74 'sontek_wl' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 75 75 'sontek_flow' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 76 'press' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 76 77 'press_wl' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 77 78 'press_flow' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 79 'press_csi_ft' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 80 'press_csi_cfs' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 78 81 } 79 82 … … 110 113 data['time'][i] = dt2es(sample_dt) # sample time in epoch seconds 111 114 115 # SMH -- 2009-12-05 modification 116 # press_csi water level and flow conversion on the data logger is not correct 117 # this will be reverted to original pressure reading and wl and flow recomputed. 112 118 if len(csi)==6: 113 119 # MOW has all six fields but no sontek now 114 120 data['rain'][i] = csi[1] # 15 min rain count (inches) 115 data['sontek_wl'][i] = csi[2] # sontek water level (ft)116 data['sontek_flow'][i] = csi[3] # sontek flow (cfs)117 data['press_ wl'][i] = csi[4] #pressure water level (ft)118 data['press_ flow'][i] = csi[5] # flowflow (cfs)121 # data['sontek_wl'][i] = csi[2] # sontek water level (ft) 122 # data['sontek_flow'][i] = csi[3] # sontek flow (cfs) 123 data['press_csi_ft'][i] = csi[4] # csi reported pressure water level (ft) 124 data['press_csi_cfs'][i] = csi[5] # csi reported flow (cfs) 119 125 i=i+1 120 126 elif len(csi)==4: 121 # CBC is not reporting pressure level and flow (THIS IS NOT RIGHT) 122 # Need Kevin Simpson at YSI to fix this. 127 # CBC is not reporting pressure level and flow -- no pressure sensor! 
123 128 data['rain'][i] = csi[1] # 15 min rain count (inches) 124 129 data['sontek_wl'][i] = csi[2] # sontek water level (ft) 125 130 data['sontek_flow'][i] = csi[3] # sontek flow (cfs) 126 data['press_wl'][i] = 0. # pressure water level (ft)127 data['press_flow'][i] = 0. # flow flow (cfs)128 131 i=i+1 129 132 else: 130 133 print ' ... skipping line %d -- %s ' % (i,line) 131 continue 134 continue 132 135 133 136 # if re.search 134 137 # for line 138 139 # revert press_csi_ft back to raw pressure reading (eventually 140 # want csi to just report the raw pressure reading so we can just 141 # do this ourselves. 142 data['press'] = (data['press_csi_ft']+1.5)/27.6778 # raw pressure (psi) 143 # convert psi to height of water column based on hydrostatic eqn 144 data['press_wl'] = data['press']*2.3059+sensor_info['press_offset'] # (feet) 145 146 # flow based on parameter as computed by data logger 147 # data['press_flow'] = data['press_csi_cfs'] 148 149 # flow based on calculation from data logger but applied to offset calibration 150 # SMH does not know what equation is based on or how these values are derived 151 data['press_flow'] = ((data['press_wl']*12))*10.81 - 8.81 # cfs 135 152 136 153 # check that no data[dt] is set to Nan or anything but datetime raw2proc/trunk/raw2proc/proc_cr1000_wq.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 07-08 18:05:37haines>2 # Last modified: Time-stamp: <2009-12-15 08:31:52 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 213 213 'long_name': 'Water Temperature', 214 214 'standard_name': 'water_temperature', 215 'units': 'degrees 215 'units': 'degrees_Celsius', 216 216 }, 217 217 'cond': {'short_name': 'cond', raw2proc/trunk/raw2proc/proc_cr10x_flow_v1.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 03-19 14:44:02haines>2 # Last modified: Time-stamp: <2009-12-07 15:02:51 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 88 88 'time' : numpy.array(numpy.ones((N,), dtype=long)*numpy.nan), 89 89 'rain' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 90 'press' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 90 91 'press_wl' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 91 92 'press_flow' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 93 'press_csi_ft' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 94 'press_csi_cfs' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 92 95 } 93 96 … … 142 145 # data['sontek_flow'][i] = csi[6] # sontek flow (cfs) 143 146 144 data['press_ wl'][i] = csi[10] # pressure water level (ft)147 data['press_csi_ft'][i] = csi[10] # pressure water level (ft) 145 148 data['rain'][i] = csi[11] # 15 sec rain count ?? 146 data['press_ flow'][i] = csi[12] # flow flow (cfs)149 data['press_csi_cfs'][i] = csi[12] # flow flow (cfs) 147 150 # data['battvolts'][i] = csi[13] # battery (volts) 148 151 … … 151 154 # if-elif 152 155 # for line 156 157 # revert press_csi_ft back to raw pressure reading (eventually 158 # want csi to just report the raw pressure reading so we can just 159 # do this ourselves. 
160 data['press'] = (data['press_csi_ft']+1.5)/27.6778 # raw pressure (psi) 161 # convert psi to height of water column based on hydrostatic eqn 162 data['press_wl'] = data['press']*2.3059+sensor_info['press_offset'] # (feet) 163 164 # flow based on parameter as computed by data logger 165 # data['press_flow'] = data['press_csi_cfs'] 166 167 # flow based on calculation from data logger but applied to offset calibration 168 # SMH does not know what equation is based on or how these values are derived 169 data['press_flow'] = ((data['press_wl']*12))*10.81 - 8.81 # cfs 153 170 154 171 # check that no data[dt] is set to Nan or anything but datetime raw2proc/trunk/raw2proc/proc_cr10x_flow_v2.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 03-19 14:30:42 haines>2 # Last modified: Time-stamp: <2009-12-07 15:03:02 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 108 108 'sontek_wl' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 109 109 'sontek_flow' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 110 'press' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 110 111 'press_wl' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 111 112 'press_flow' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 113 'press_csi_ft' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 114 'press_csi_cfs' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan), 112 115 } 113 116 … … 154 157 data['sontek_wl'][i] = csi[5] # sontek water level (ft) 155 158 data['sontek_flow'][i] = csi[6] # sontek flow (cfs) 156 data['press_ wl'][i] = csi[7] # pressure water level (ft)157 data['press_ flow'][i] = csi[8] # flow flow (cfs)159 data['press_csi_ft'][i] = csi[7] # pressure water level (ft) 160 data['press_csi_cfs'][i] = csi[8] # flow flow (cfs) 158 161 elif len(csi)==7: 159 162 # … … 168 171 # if re.search 169 172 # for line 173 174 # revert press_csi_ft back to raw pressure reading (eventually 175 # want csi to just report the raw pressure reading so we can just 176 # do this ourselves. 
177 data['press'] = (data['press_csi_ft']+1.5)/27.6778 # raw pressure (psi) 178 # convert psi to height of water column based on hydrostatic eqn 179 data['press_wl'] = data['press']*2.3059+sensor_info['press_offset'] # (feet) 180 181 # flow based on parameter as computed by data logger 182 # data['press_flow'] = data['press_csi_cfs'] 183 184 # flow based on calculation from data logger but applied to offset calibration 185 # SMH does not know what equation is based on or how these values are derived 186 data['press_flow'] = ((data['press_wl']*12))*10.81 - 8.81 # cfs 170 187 171 188 # check that no data[dt] is set to Nan or anything but datetime raw2proc/trunk/raw2proc/proc_cr10x_wq_v1.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <200 8-12-16 16:47:38haines>2 # Last modified: Time-stamp: <2009-12-15 08:32:19 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 222 222 'long_name': 'Water Temperature', 223 223 'standard_name': 'water_temperature', 224 'units': 'degrees 224 'units': 'degrees_Celsius', 225 225 }, 226 226 'cond': {'short_name': 'cond', raw2proc/trunk/raw2proc/proc_cr10x_wq_v2.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <200 8-12-16 10:55:03 haines>2 # Last modified: Time-stamp: <2009-12-15 08:32:33 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 251 251 'long_name': 'Water Temperature', 252 252 'standard_name': 'water_temperature', 253 'units': 'degrees 253 'units': 'degrees_Celsius', 254 254 }, 255 255 'cond': {'short_name': 'cond', raw2proc/trunk/raw2proc/proc_jpier_ascii_met.py
r211 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <200 8-10-01 12:45:54 haines>2 # Last modified: Time-stamp: <2009-12-15 08:40:54 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 198 198 'long_name': 'Air Temperature', 199 199 'standard_name': 'air_temperature', 200 'units': 'degrees 200 'units': 'degrees_Celsius', 201 201 }, 202 202 'humidity' : {'short_name': 'humidity', … … 208 208 'long_name': 'Dew Temperature', 209 209 'standard_name': 'dew_temp', 210 'units': 'degrees 210 'units': 'degrees_Celsius', 211 211 }, 212 212 'air_pressure' : {'short_name': 'air_pressure', … … 231 231 'long_name': 'Wind Chill', 232 232 'standard_name': 'wind_chill', 233 'units': 'degrees 233 'units': 'degrees_Celsius', 234 234 }, 235 235 'rainfall_rate' : {'short_name': 'rR', raw2proc/trunk/raw2proc/proc_nortek_wds_dw.py
r211 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <200 8-10-01 12:47:05haines>2 # Last modified: Time-stamp: <2009-10-28 17:27:56 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 300 300 # (polar_waves_cur_wds.m, version 8) 301 301 pi = numpy.pi 302 ac = numpy.cos(D*pi/180)303 as = numpy.sin(D*pi/180)304 305 ch0 = (ac *Stheta*Dtheta).sum()306 sh0 = (as *Stheta*Dtheta).sum()302 ac1 = numpy.cos(D*pi/180) 303 as1 = numpy.sin(D*pi/180) 304 305 ch0 = (ac1*Stheta*Dtheta).sum() 306 sh0 = (as1*Stheta*Dtheta).sum() 307 307 Dm = numpy.arctan2(sh0,ch0)*180/pi 308 308 if Dm<0: Dm = Dm+360. 309 309 310 ch0s = (ac *Stheta_s*Dtheta).sum()311 sh0s = (as *Stheta_s*Dtheta).sum()310 ch0s = (ac1*Stheta_s*Dtheta).sum() 311 sh0s = (as1*Stheta_s*Dtheta).sum() 312 312 Dms = numpy.arctan2(sh0s,ch0s)*180/pi 313 313 if Dms<0: Dms = Dms+360. 314 314 315 ch0w = (ac *Stheta_w*Dtheta).sum()316 sh0w = (as *Stheta_w*Dtheta).sum()315 ch0w = (ac1*Stheta_w*Dtheta).sum() 316 sh0w = (as1*Stheta_w*Dtheta).sum() 317 317 Dmw = numpy.arctan2(sh0w,ch0w)*180/pi 318 318 if Dmw<0: Dmw = Dmw+360. raw2proc/trunk/raw2proc/proc_nortek_wpa_adcp.py
r213 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <20 08-10-16 17:02:54haines>2 # Last modified: Time-stamp: <2010-01-22 12:11:55 haines> 3 3 """ 4 4 how to parse data, and assert what data and info goes into … … 168 168 bin_number = wpa[0] 169 169 j = wpa[0]-1 170 print j 170 171 hab[j] = wpa[1] 171 172 … … 368 369 'long_name': 'Water Temperature at Transducer', 369 370 'standard_name': 'water_temperature', 370 'units': 'deg 371 'units': 'deg_C', 371 372 }, 372 373 } raw2proc/trunk/raw2proc/procutil.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <20 09-06-19 15:42:44haines>2 # Last modified: Time-stamp: <2010-04-07 17:41:26 haines> 3 3 """Utilities to help data processing 4 4 … … 289 289 # fn_glob = 'bogue_dspec_plot*' 290 290 291 def addnan(dt, data): 291 def addnan(dt, data, maxdelta=None): 292 """ 293 insert NaN for time gaps 294 295 :Parameters: 296 dt : numpy.array of datetime 297 data : numpy.array of data 298 maxdelta : size of time gap (fraction or number of days) to insert 299 [default is two times its own sample interval] 300 301 :Returns: 302 new_dt : numpy.array of datetime 303 new_data : numpy.array of data 304 305 306 """ 292 307 # dt to be only 1-dimension and data to be 1- or 2-dimensional 293 308 … … 300 315 delta = numpy.diff(dn) 301 316 sample_interval = numpy.median(delta) 302 maxdelta = 1.5*sample_interval 317 if maxdelta==None: 318 maxdelta = 2.*sample_interval 303 319 # print maxdelta 304 320 igap = (delta > maxdelta).nonzero()[0] … … 309 325 # convert sample interval to dt object 310 326 sample_interval = timedelta(0.5*sample_interval) 311 # for each gap in data create NaN312 data_insert = [numpy.nan for gap in igap]313 327 # for each gap in time create datetime value 314 328 dt_insert = [dt[gap]+sample_interval for gap in igap] 315 329 # insert new sample times at indices of the gaps 316 330 new_dt = numpy.insert(numpy.array(dt), igap+1, dt_insert) 317 # insert NaN data at the indices that match the above times 318 new_data = numpy.insert(numpy.array(data), igap+1, data_insert, axis=0) 331 # insert NaN value at the gaps (insert placed just before obs) 332 new_data = numpy.insert(numpy.array(data), igap+1, numpy.nan, axis=0) 333 # if all the data is NaN, then autoscale crocks. This prevents 334 # throwing an error (but be careful if using for anything other than grafs) 335 if numpy.isnan(new_data).all(): 336 new_data[-1]=0. 
319 337 return (new_dt, new_data) 320 338 321 # unit conversions 339 # 340 341 # unit conversion using udunits 342 def udconvert(val, units_from, units_to): 343 """Convert units using NCAR UDUNITS-2 344 345 Convert data to another unit using UDUNITS-2 API. 346 347 :Parameters: 348 val : scalar or list of scalars, numpy.array 349 Data to be converted 350 units_from : string 351 Units from which the values to be converted 352 units_to : string 353 Units to which the values will be converted 354 355 :Returns: 356 val_to : float scalar, list, or numpy.array 357 Data that is converted to new units 358 units_to : string 359 Units to which the data are now converted 360 361 Files 362 ----- 363 XML file that can be edited to change and add new conversions 364 /usr/local/share/udunits/udunits-common.xml 365 366 Not recommended to edit but useful info on UDUNITS-2 367 udunits2-accepted.xml 368 udunits2-base.xml 369 udunits2-derived.xml 370 udunits2-prefixes.xml 371 udunits2.xml 372 373 """ 374 import udunits 375 cnv = udunits.udunits(units_from, units_to) 376 377 if cnv[0]==0: 378 val_to = val*cnv[1] + cnv[2] 379 # if val_to > 99: 380 # val_to_str = '%.4g (%s)' % (val_to, valunits_to) 381 # else: 382 # val_to_str = '%.2g (%s)' % (val_to, valunits_to) 383 else: 384 print cnv 385 return (None, None) 386 387 # TO DO: Need to handle errors in a better fashion 388 # [-1, 'Unable to parse from', 'NTU', -3, 'Conversion not possible'] 389 # [-2, 'Unable to parse to', 'NTU', -3, 'Conversion not possible'] 390 # [-3, 'Conversion not possible'] 391 return (val_to, units_to) 392 393 # the following to be deprecated by udunits2 API 322 394 def meters2feet(meters): 323 395 """Convert meters to feet: <feet> = <meters>*3.28084 """ … … 438 510 # global replace _FillValue 439 511 nc_replace_fillvalue(ofn, -99999.0) 512 513 def proc2csv(pi, si, yyyy_mm): 514 """Select specific variables and times from current monthly netCDF 515 and post file of csv data. TEST MODE. 
516 517 For each active config file, load specific variables from NCCOOS 518 monthly netCDF, make any necessary changes to data or attributes 519 conform to CSV output, subset data, and 520 create new file in csv directory. 521 522 NOTE: See auto() function for similar action. 523 524 """ 525 526 platform = pi['id'] 527 package = si['id'] 528 # input file 529 si['proc_filename'] = '%s_%s_%s.nc' % (platform, package, yyyy_mm) 530 ifn = os.path.join(si['proc_dir'], si['proc_filename']) 531 # output file 532 si['csv_filename'] = 'nccoos_%s_%s_latest.csv' % (platform, package) 533 ofn = os.path.join(si['csv_dir'], si['csv_filename']) 534 f = open(ofn, 'w') 535 536 if os.path.exists(ifn): 537 print ' ... ... csv : %s ' % (ofn,) 538 # get dt from current month file 539 (es, units) = nc_get_time(ifn) 540 dt = [es2dt(e) for e in es] 541 last_dt = dt[-1] 542 else: 543 # no input then report fact csv file 544 print ' ... ... csv: NO csv data reported ' 545 f.write('"No DATA REPORTED", " \\- ", " \\- "\n') 546 f.close() 547 return 548 549 # determine which index of data is within the specified timeframe (last 2 days) 550 n = len(dt) 551 idx = numpy.array([False for i in range(n)]) 552 for i, val in enumerate(dt): 553 if val>last_dt-timedelta(days=1) and val<=last_dt+timedelta(seconds=360): 554 idx[i] = True 555 dt = numpy.array(dt) 556 dt = dt[idx] 557 558 # read in data and unpack tuple 559 d = nc_load(ifn, si['csv_vars']) 560 global_atts, var_atts, dim_inits, var_inits, var_data = d 561 562 # dts = es2dt(dt[-1]) 563 # set timezone info to UTC (since data from level1 should be in UTC!!) 
564 last_dt = last_dt.replace(tzinfo=tzutc()) 565 # return new datetime based on computer local 566 last_dt_local = last_dt.astimezone(tzlocal()) 567 568 diff = abs(last_dt - last_dt_local) 569 if diff.days>0: 570 last_dt_str = last_dt.strftime("%H:%M %Z on %b %d, %Y") + \ 571 ' (' + last_dt_local.strftime("%H:%M %Z, %b %d") + ')' 572 else: 573 last_dt_str = last_dt.strftime("%H:%M %Z") + \ 574 ' (' + last_dt_local.strftime("%H:%M %Z") + ')' \ 575 + last_dt.strftime(" on %b %d, %Y") 576 577 # uses dateutil.tz.tzutc() from dateutil 578 now_utc_dt = datetime.now(tzutc()) 579 now_utc_dt = now_utc_dt.replace(second=0, microsecond=0) 580 # uses dateutil.tz.tzlocal() from dateutil to get timezone settings as known by the operating system 581 now_local_dt = datetime.now(tzlocal()) 582 now_local_dt = now_local_dt.replace(second=0, microsecond=0) 583 # if more than a day difference between local time and UTC, specify dates for each 584 # otherwise date for one is sufficient (cuts down on clutter) 585 diff = abs(now_local_dt - now_utc_dt) 586 if diff.days>0: 587 now_str = now_utc_dt.strftime("%H:%M %Z on %b %d, %Y") + \ 588 ' (' + now_local_dt.strftime("%H:%M %Z, %b %d") + ')' 589 else: 590 now_str = now_utc_dt.strftime("%H:%M %Z") + \ 591 ' (' + now_local_dt.strftime("%H:%M %Z") + ')' \ 592 + now_utc_dt.strftime(" on %b %d, %Y") 593 594 # how old is the data 595 stale_diff = abs(now_utc_dt - last_dt) 596 if stale_diff.days>0 or stale_diff.seconds>=8*60*60: 597 stale_str = display_time_diff(stale_diff) 598 else: 599 stale_str = '' # use empty string to keep background white 600 601 varNames = [vn for vn, vt, vd in var_inits] 602 var_data = list(var_data) 603 for i in range(len(varNames)): 604 vn, vd = var_data[i] 605 vd = vd[idx] 606 607 # (1) var name and units (first td) 608 var_name_str = '%s (%s)' % (var_atts[vn]['long_name'], var_atts[vn]['short_name']) 609 valunits = var_atts[vn]['units'] 610 if vn=='rain': 611 val = vd.sum() 612 var_name_str = 'Rain Total (24 hrs)' 
613 else: 614 val = vd[-1] 615 616 # if can take the length of val 617 # probably a list, tuple 618 # there will be more than one value of which we want a mean (ignoring NaN') 619 if bool('__len__' in dir(val)): 620 val = numpy.mean(numpy.ma.masked_where(numpy.isnan(val), val)) 621 622 # to metric 623 import udunits 624 625 sn = var_atts[vn]['standard_name'] 626 valunits_from = valunits 627 if 'temperature' in sn or sn in ('wind_chill', 'dew_point'): 628 if valunits_from == 'degrees Celsius': 629 valunits_from = 'degC' 630 valunits_to = 'degC' 631 elif 'velocity' in sn: 632 valunits_to = 'm s-1' 633 elif 'flux' in sn or sn in ('discharge',): 634 if valunits_from == 'cfs': 635 valunits_from = 'ft^3/sec' 636 valunits_to = 'm^3/s' 637 elif 'rain' in sn: 638 valunits_to = 'mm' 639 elif 'level' in sn or 'height' in sn or 'depth' in sn: 640 valunits_to = 'm' 641 else: 642 # can't find a conversion we want so convert to itself 643 valunits_to = valunits_from 644 645 cnv = udunits.udunits(valunits_from, valunits_to) 646 647 if cnv[0]==0: 648 val_to = val*cnv[1] + cnv[2] 649 if val_to > 99: 650 metric_str = '%.4g (%s)' % (val_to, valunits_to) 651 else: 652 metric_str = '%.2g (%s)' % (val_to, valunits_to) 653 # handle errors 654 # [-1, 'Unable to parse from', 'NTU', -3, 'Conversion not possible'] 655 # [-2, 'Unable to parse to', 'NTU', -3, 'Conversion not possible'] 656 # [-3, 'Conversion not possible'] 657 elif cnv[0]==-1 or cnv[0]==-2: 658 if val > 99: 659 metric_str = '%.4g (%s)' % (val, valunits) 660 else: 661 metric_str = '%.2g (%s)' % (val, valunits) 662 else: 663 metric_str = '\-' 664 665 # to english units 666 if 'temperature' in sn or sn in ('wind_chill', 'dew_point'): 667 if valunits_from == 'degrees Celsius': 668 valunits_from = 'degC' 669 valunits_to = 'degF' 670 elif 'velocity' in sn: 671 valunits_to = 'knots' 672 elif 'flux' in sn or sn in ('discharge',): 673 if valunits_from == 'cfs': 674 valunits_from = 'ft^3/sec' 675 valunits_to = 'ft^3/s' 676 elif 'rain' in 
sn: 677 valunits_to = 'in' 678 elif 'level' in sn or 'height' in sn or 'depth' in sn: 679 valunits_to = 'ft' 680 else: 681 valunits_to = valunits_from 682 # 683 cnv = udunits.udunits(valunits_from, valunits_to) 684 if cnv[0]==0: 685 val_to = val*cnv[1] + cnv[2] 686 if val > 99: 687 english_str ='%.4g (%s)' % (val_to, valunits_to) 688 else: 689 english_str = '%.2g (%s)' % (val_to, valunits_to) 690 # handle errors 691 # [-1, 'Unable to parse from', 'NTU', -3, 'Conversion not possible'] 692 # [-2, 'Unable to parse to', 'NTU', -3, 'Conversion not possible'] 693 # [-3, 'Conversion not possible'] 694 elif cnv[0]==-1 or cnv[0]==-2: 695 if val > 99: 696 english_str = '%.4g (%s)' % (val, valunits) 697 else: 698 english_str = '%.2g (%s)' % (val, valunits) 699 else: 700 english_str = '\-' 701 702 if metric_str == english_str: 703 english_str = '\-' 704 705 if vn=='time': 706 f.write('"**%s:** %s", ""\n' % ('Sample Time', last_dt_str)) 707 else: 708 f.write('"%s", "%s", "%s"\n' % (var_name_str, metric_str, english_str)) 709 710 f.close() 711 712 713 714 raw2proc/trunk/raw2proc/raw2proc.py
r233 r320 1 1 #!/usr/bin/env python 2 # Last modified: Time-stamp: <2009- 06-23 16:41:19haines>2 # Last modified: Time-stamp: <2009-12-05 21:32:18 haines> 3 3 """Process raw data to monthly netCDF data files 4 4 … … 325 325 # print ' ... ... latest : %s ' % si['latest_dir'] 326 326 proc2latest(pi, si, yyyy_mm) 327 328 if 'csv_dir' in si.keys(): 329 proc2csv(pi, si, yyyy_mm) 327 330 # 328 331 else: