
root/raw2proc/trunk/raw2proc/proc_cr1000_wind.py

Revision 488 (checked in by haines, 12 years ago)

removed test_ and scr_ files not necessary for SVN

#!/usr/bin/env python
# Last modified:  Time-stamp: <2012-04-23 14:13:36 haines>
"""
How to parse the data, and what data and info go into
creating and updating the monthly netCDF files.

Parse met data collected on a Campbell Scientific DataLogger (LoggerNet) (CSI).

parser : sample date and time,

creator : lat, lon, z, time,
updater : time,


Examples
--------

>> (parse, create, update) = load_processors('proc_csi_adcp_v2')
or
>> si = get_config(cn+'.sensor_info')
>> (parse, create, update) = load_processors(si['adcp']['proc_module'])

>> lines = load_data(filename)
>> data = parse(platform_info, sensor_info, lines)
>> create(platform_info, sensor_info, data) or
>> update(platform_info, sensor_info, data)

"""


from raw2proc import *
from procutil import *
from ncutil import *
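# NOTE: helper names used below (e.g. filt_datetime, scanf_datetime, dt2es,
# timedelta, re, NAN_RE_STR, REAL_RE_STR, NC, pycdfVersion, pycdfArrayPkg)
# are assumed to be provided by these wildcard imports.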

now_dt = datetime.utcnow()
# datetime.replace() returns a new object; reassign to actually drop the microseconds
now_dt = now_dt.replace(microsecond=0)

def parser(platform_info, sensor_info, lines):
    """
    Example wind data

    Stats (avg, std, and max) for wind sampled every second for one minute during a 6-minute
    period.  The stats are NOT over the full 6 minutes, as the time stamp would suggest.

    "TOA5","CR1000_B1","CR1000","37541","CR1000.Std.21","CPU:NCWIND_12_Buoy_All.CR1","58723","AWind_6Min"
    "TIMESTAMP","RECORD","W1_SpeedAvg","W1_DirAvg","W1_SpeedMax","W1_SpeedStd","W2_SpeedAvg","W2_DirAvg","W2_SpeedMax","W2_SpeedStd"
    "TS","RN","","Deg","","","","Deg","",""
    "","","WVc","WVc","Max","Std","WVc","WVc","Max","Std"
    "2011-12-01 00:01:59",6507,8.32,319.1,10.09,0.781,8.15,310.9,10.09,0.832
    "2011-12-01 00:07:59",6508,9.43,323.3,11.27,1.094,9.11,315.8,10.68,1.015
    "2011-12-01 00:13:59",6509,9.94,308.6,12.35,1.077,9.74,301.3,11.96,1.027
    "2011-12-01 00:19:59",6510,8.86,304.5,10.98,1.003,8.8,296.4,11.27,1.066
    "2011-12-01 00:25:59",6511,9.02,310.8,10.98,1.023,8.95,302.4,10.78,0.964
    "2011-12-01 00:31:59",6512,9.58,304.9,11.76,1.156,9.39,296.7,11.76,1.167

    """
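    # Column mapping after splitting on commas (sw[0] is the quoted timestamp;
    # csi holds the remaining numeric fields):
    #   csi[0]=RECORD, csi[1]=W1_SpeedAvg, csi[2]=W1_DirAvg, csi[3]=W1_SpeedMax,
    #   csi[4]=W1_SpeedStd, csi[5]=W2_SpeedAvg, csi[6]=W2_DirAvg,
    #   csi[7]=W2_SpeedMax, csi[8]=W2_SpeedStd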

    import numpy
    from datetime import datetime
    from time import strptime

    # get sample datetime from filename
    fn = sensor_info['fn']
    sample_dt_start = filt_datetime(fn)

    # how many samples (don't count the 4 header lines)
    nsamp = len(lines[4:])

    N = nsamp
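    # preallocate the output arrays (one element per data line), initialized to NaN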
    data = {
        'dt' : numpy.array(numpy.ones((N,), dtype=object)*numpy.nan),
        'time' : numpy.array(numpy.ones((N,), dtype=long)*numpy.nan),
        'wspd1' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wspd1_std' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wgust1' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wdir1' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wspd2' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wspd2_std' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wgust2' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        'wdir2' : numpy.array(numpy.ones((N,), dtype=float)*numpy.nan),
        }

    # sample count
    i = 0

    for line in lines[4:]:
        csi = []
        # split line
        sw = re.split(',', line)
        if len(sw)<=0:
            print ' ... skipping line %d ' % (i,)
            continue

        # replace any "NAN" text with a number (matches the -99999 fill value used below)
        for index, s in enumerate(sw):
            m = re.search(NAN_RE_STR, s)
            if m:
                sw[index] = '-99999'

        # parse all remaining fields (floats and integers); the date-time in sw[0] is parsed below
        for s in sw[1:]:
            m = re.search(REAL_RE_STR, s)
            if m:
                csi.append(float(m.groups()[0]))

        # parse the sample date-time, applying any UTC offset for this sensor
        if sensor_info['utc_offset']:
            sample_dt = scanf_datetime(sw[0], fmt='"%Y-%m-%d %H:%M:%S"') + \
                        timedelta(hours=sensor_info['utc_offset'])
        else:
            sample_dt = scanf_datetime(sw[0], fmt='"%Y-%m-%d %H:%M:%S"')

        data['dt'][i] = sample_dt # sample datetime
        data['time'][i] = dt2es(sample_dt) # sample time in epoch seconds

        if len(csi)==9:
            #
            # data['samplenum'][i] = csi[0] # sample number assigned by datalogger in table
            data['wspd1'][i] = csi[1] # anemometer 1 wind speed avg (m s-1)
            data['wdir1'][i] = csi[2] # anemometer 1 wind direction avg (deg)
            data['wgust1'][i] = csi[3] # anemometer 1 wind speed max (gust) (m s-1)
            data['wspd1_std'][i] = csi[4] # anemometer 1 wind speed std (m s-1)
            data['wspd2'][i] = csi[5] # anemometer 2 wind speed avg (m s-1)
            data['wdir2'][i] = csi[6] # anemometer 2 wind direction avg (deg)
            data['wgust2'][i] = csi[7] # anemometer 2 wind speed max (gust) (m s-1)
            data['wspd2_std'][i] = csi[8] # anemometer 2 wind speed std (m s-1)
            i=i+1
        else:
            print ' ... skipping line %d -- %s ' % (i,line)
            continue

        # if len(csi)
    # for line


    # check that no data['dt'] is set to NaN or anything but a datetime
    # keep only data that has a resolved datetime
    keep = numpy.array([isinstance(dt, datetime) for dt in data['dt'][:]])
    if keep.any():
        for param in data.keys():
            data[param] = data[param][keep]

    return data


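# creator returns the global attributes, variable attributes, dimension and
# variable definitions, and the first month's data used to create a new monthly
# netCDF file (see raw2proc.process() and ncutil); updater below returns the
# pieces needed to append to an existing monthly file.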
def creator(platform_info, sensor_info, data):
    #
    #
    # subset data only to month being processed (see raw2proc.process())
    i = data['in']

    title_str = sensor_info['description']+' at '+ platform_info['location']
    global_atts = {
        'title' : title_str,
        'institution' : platform_info['institution'],
        'institution_url' : platform_info['institution_url'],
        'institution_dods_url' : platform_info['institution_dods_url'],
        'metadata_url' : platform_info['metadata_url'],
        'references' : platform_info['references'],
        'contact' : platform_info['contact'],
        #
        'source' : platform_info['source']+' '+sensor_info['source'],
        'history' : 'raw2proc using ' + sensor_info['process_module'],
        'comment' : 'File created using pycdf'+pycdfVersion()+' and numpy '+pycdfArrayPkg(),
        # conventions
        'Conventions' : platform_info['conventions'],
        # SEACOOS CDL codes
        'format_category_code' : platform_info['format_category_code'],
        'institution_code' : platform_info['institution_code'],
        'platform_code' : platform_info['id'],
        'package_code' : sensor_info['id'],
        # institution specific
        'project' : platform_info['project'],
        'project_url' : platform_info['project_url'],
        # timeframe of data contained in file (yyyy-mm-dd HH:MM:SS)
        # first date in monthly file
        'start_date' : data['dt'][i][0].strftime("%Y-%m-%d %H:%M:%S"),
        # last date in monthly file
        'end_date' : data['dt'][i][-1].strftime("%Y-%m-%d %H:%M:%S"),
        'release_date' : now_dt.strftime("%Y-%m-%d %H:%M:%S"),
        #
        'creation_date' : now_dt.strftime("%Y-%m-%d %H:%M:%S"),
        'modification_date' : now_dt.strftime("%Y-%m-%d %H:%M:%S"),
        'process_level' : 'level1',
        #
        # fill value must type-match the data (e.g. fill value is real if data is real)
        '_FillValue' : -99999.,
        }


    var_atts = {
        # coordinate variables
        'time' : {'short_name': 'time',
                  'long_name': 'Time',
                  'standard_name': 'time',
                  'units': 'seconds since 1970-1-1 00:00:00 -0', # UTC
                  'axis': 'T',
                  },
        'lat' : {'short_name': 'lat',
                 'long_name': 'Latitude',
                 'standard_name': 'latitude',
                 'reference':'geographic coordinates',
                 'units': 'degrees_north',
                 'valid_range':(-90.,90.),
                 'axis': 'Y',
                 },
        'lon' : {'short_name': 'lon',
                 'long_name': 'Longitude',
                 'standard_name': 'longitude',
                 'reference':'geographic coordinates',
                 'units': 'degrees_east',
                 'valid_range':(-180.,180.),
                 'axis': 'X',
                 },
        'z' : {'short_name': 'z',
               'long_name': 'Altitude',
               'standard_name': 'altitude',
               'reference':'zero at mean sea level',
               'positive' : 'up',
               'units': 'm',
               'axis': 'Z',
               },
        # data variables
        'wspd1' : {'short_name': 'wspd',
                  'long_name': 'Wind Speed',
                  'standard_name': 'wind_speed',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer1_height'],
                  'z_units' : 'meter',
                  },
        'wdir1' : {'short_name': 'wdir',
                  'long_name': 'Wind Direction from',
                  'standard_name': 'wind_from_direction',
                  'reference': 'clockwise from Magnetic North',
                  'valid_range': (0., 360.),
                  'units': 'degrees',
                  'z' : sensor_info['anemometer1_height'],
                  'z_units' : 'meter',
                  },
        'wgust1' : {'short_name': 'wgust',
                  'long_name': 'Wind Gust',
                  'standard_name': 'wind_gust',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer1_height'],
                  'z_units' : 'meter',
                  },
        'wspd1_std' : {'short_name': 'wspd std',
                  'long_name': 'Standard Deviation of Wind Speed',
                  'standard_name': 'wind_speed standard_deviation',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer1_height'],
                  'z_units' : 'meter',
                  },
        # Second anemometer
        'wspd2' : {'short_name': 'wspd',
                  'long_name': 'Wind Speed',
                  'standard_name': 'wind_speed',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer2_height'],
                  'z_units' : 'meter',
                  },
        'wdir2' : {'short_name': 'wdir',
                  'long_name': 'Wind Direction from',
                  'standard_name': 'wind_from_direction',
                  'reference': 'clockwise from Magnetic North',
                  'valid_range': (0., 360.),
                  'units': 'degrees',
                  'z' : sensor_info['anemometer2_height'],
                  'z_units' : 'meter',
                  },
        'wgust2' : {'short_name': 'wgust',
                  'long_name': 'Wind Gust',
                  'standard_name': 'wind_gust',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer2_height'],
                  'z_units' : 'meter',
                  },
        'wspd2_std' : {'short_name': 'wspd std',
                  'long_name': 'Standard Deviation of Wind Speed',
                  'standard_name': 'wind_speed standard_deviation',
                  'units': 'm s-1',
                  'can_be_normalized': 'no',
                  'z' : sensor_info['anemometer2_height'],
                  'z_units' : 'meter',
                  },
        }

    # dimension names use tuple so order of initialization is maintained
    dim_inits = (
        ('ntime', NC.UNLIMITED),
        ('nlat', 1),
        ('nlon', 1),
        ('nz', 1),
        )
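    # ntime is the unlimited (record) dimension, so updater can append new
    # records to the monthly file as more data arrive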

    # using tuple of tuples so order of initialization is maintained
    # (a dict is fine for attributes since their order of init is not important)
    # use dimension names not values
    # (varName, varType, (dimName1, [dimName2], ...))
    var_inits = (
        # coordinate variables
        ('time', NC.INT, ('ntime',)),
        ('lat', NC.FLOAT, ('nlat',)),
        ('lon', NC.FLOAT, ('nlon',)),
        ('z',  NC.FLOAT, ('nz',)),
        # data variables
        ('wspd1', NC.FLOAT, ('ntime',)),
        ('wdir1', NC.FLOAT, ('ntime',)),
        ('wgust1', NC.FLOAT, ('ntime',)),
        ('wspd1_std', NC.FLOAT, ('ntime',)),
        ('wspd2', NC.FLOAT, ('ntime',)),
        ('wdir2', NC.FLOAT, ('ntime',)),
        ('wgust2', NC.FLOAT, ('ntime',)),
        ('wspd2_std', NC.FLOAT, ('ntime',)),
        )

    # var data
    var_data = (
        ('lat',  platform_info['lat']),
        ('lon', platform_info['lon']),
        ('z', platform_info['altitude']),
        #
        ('time', data['time'][i]),
        #
        ('wspd1', data['wspd1'][i]),
        ('wdir1', data['wdir1'][i]),
        ('wgust1', data['wgust1'][i]),
        ('wspd1_std', data['wspd1_std'][i]),
        ('wspd2', data['wspd2'][i]),
        ('wdir2', data['wdir2'][i]),
        ('wgust2', data['wgust2'][i]),
        ('wspd2_std', data['wspd2_std'][i]),
        )

    return (global_atts, var_atts, dim_inits, var_inits, var_data)

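# updater refreshes the time-dependent global attributes (end_date, release_date,
# modification_date) and returns the new records to append to the existing monthly file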
def updater(platform_info, sensor_info, data):
    #
    # subset data only to month being processed (see raw2proc.process())
    i = data['in']

    global_atts = {
        # update times of data contained in file (yyyy-mm-dd HH:MM:SS)
        # last date in monthly file
        'end_date' : data['dt'][i][-1].strftime("%Y-%m-%d %H:%M:%S"),
        'release_date' : now_dt.strftime("%Y-%m-%d %H:%M:%S"),
        #
        'modification_date' : now_dt.strftime("%Y-%m-%d %H:%M:%S"),
        }

    # data variables
    # update any variable attributes like range, min, max
    var_atts = {}
    # var_atts = {
    #    'wtemp': {'max': max(data.u),
    #          'min': min(data.v),
    #          },
    #    'cond': {'max': max(data.u),
    #          'min': min(data.v),
    #          },
    #    }

    # data
    var_data = (
        ('time', data['time'][i]),
        #
        ('wspd1', data['wspd1'][i]),
        ('wdir1', data['wdir1'][i]),
        ('wgust1', data['wgust1'][i]),
        ('wspd1_std', data['wspd1_std'][i]),
        ('wspd2', data['wspd2'][i]),
        ('wdir2', data['wdir2'][i]),
        ('wgust2', data['wgust2'][i]),
        ('wspd2_std', data['wspd2_std'][i]),
        )

    return (global_atts, var_atts, var_data)
#