# $Id$

"""
Time series analysis

@var __author__: name of author
@var __copyright__: copyrights
@var __license__: licence agreement
@var __url__: url entry point on documentation
@var __version__: version
@var __date__: date of the version
"""


__author__="Lutz Gross, l.gross@uq.edu.au"
__copyright__=""" Copyright (c) 2006 by ACcESS MNRF
                  http://www.access.edu.au
                  Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
               http://www.opensource.org/licenses/osl-3.0.php"""
__url__="http://www.iservo.edu.au/esys/escript"
__version__="$Revision$"
__date__="$Date$"


import numarray
from types import SliceType
DEFAULT_BUFFER_SIZE=1000
DEFAULT_FLOAT_TYPE=numarray.Float64

class TimeSeriesBase:
    """The TimeSeriesBase class is the base class for all classes of the TimeSeries module."""

    def __init__(self,debug=False,description="TimeSeriesBase"):
        self.__debug=debug
        self.setDescription(description)

    def __str__(self):
        return self.__description

    def setDescription(self,text):
        self.__description=text

    def setDebugOn(self):
        """switches on debugging mode"""
        self.__debug=True

    def setDebugOff(self):
        """switches off debugging mode"""
        self.__debug=False

    def setDebug(self,flag=False):
        """sets debug mode to flag"""
        if flag:
            self.setDebugOn()
        else:
            self.setDebugOff()

    def debug(self):
        """returns True if debug mode is on"""
        return self.__debug

#============================================================================================================
class TimeSeriesBaseDataset(TimeSeriesBase):
    """provides an interface for accessing a set of linearly ordered data."""
    def __init__(self,buffer,offset=0,debug=False,description="TimeSeriesDataset"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__buffer=buffer
        self.__offset=offset
        if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())

    def __len__(self):
        """needed to handle negative indexing in slicing"""
        return 0

    def getNumComponents(self):
        """returns the number of components of the data (may be overwritten by subclass)"""
        return self.getBaseBuffer().getNumComponents()

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastDatum()-self.getOffset()

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstDatum()-self.getOffset()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstAvailableDatum()-self.getOffset()

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in getBaseBuffer() (may be overwritten by subclass)"""
        return self.getOffset()

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesBaseDataset (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastUnreferencedDatum()-self.getOffset()

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (to be overwritten by subclass)"""
        self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum+self.getOffset())

    def append(self,values):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (to be overwritten by subclass)"""
        self.getBaseBuffer().append(values)

    def getBaseBufferSize(self):
        """returns the size of the buffer (to be overwritten by subclass)"""
        return self.getBaseBuffer().getBaseBufferSize()

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
        return self.getBaseBuffer().needsRearrangement(num_new_data)

    def isEmpty(self):
        """returns True if no data have been appended to the buffer"""
        return self.getNumData()<=0

    def getNumData(self):
        """returns the number of data (not all of them are accessible)"""
        return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1

    def getBaseBuffer(self):
        """returns the buffer referenced by the TimeSeriesBaseDataset"""
        return self.__buffer

    def getOffset(self):
        """returns the offset when referring to dataset elements"""
        return self.__offset

    def __getitem__(self,index):
        """returns the datum index"""
        if type(index)==SliceType:
            start=index.start
            end=index.stop
            if start==end:
                return self[start]
            else:
                if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
                   end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
                return self.getBaseBuffer()[start+self.getOffsetInBaseBuffer():end+self.getOffsetInBaseBuffer()]
        else:
            if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
            return self.getBaseBuffer()[index+self.getOffsetInBaseBuffer()]

class TimeSeriesBaseBuffer(TimeSeriesBaseDataset):
    """An implementation of TimeSeriesBaseDataset which actually stores the data in a numarray buffer"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="TimeSeriesBaseBuffer"):
        if numComponents<2:
            buffer=numarray.zeros((buffer_size,),type)
        else:
            buffer=numarray.zeros((buffer_size,numComponents),type)
        TimeSeriesBaseDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
        self.__num_data_in_buffer=0
        self.__id_last_unreferenced_datum=id_of_first_datum-1
        self.__id_last_datum=id_of_first_datum-1
        self.__id_first_datum=id_of_first_datum
        if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
                               (self,self.getBaseBufferSize(),self.getNumComponents(),id_of_first_datum)


    def getBaseBufferSize(self):
        """returns the size of the buffer"""
        return self.getBaseBuffer().shape[0]

    def getNumComponents(self):
        """returns the number of components of the data (overwrites TimeSeriesBaseDataset method)"""
        if self.getBaseBuffer().rank==1:
            return 1
        else:
            return self.getBaseBuffer().shape[1]

    def getNumDataInBaseBuffer(self):
        """returns the number of data currently in the buffer"""
        return self.__num_data_in_buffer

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_datum

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_first_datum

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in the buffer (overwrites method from TimeSeriesBaseDataset)"""
        return -self.getIdOfLastDatum()+self.getNumDataInBaseBuffer()-1

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesBaseDataset (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_unreferenced_datum

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (overwrites TimeSeriesBaseDataset method)"""
        if self.__id_last_unreferenced_datum>last_unreferenced_datum:
            self.__id_last_unreferenced_datum=last_unreferenced_datum
            if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unreferenced_datum)

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended"""
        return self.getNumDataInBaseBuffer()+num_new_data>self.getBaseBufferSize()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (overwrites TimeSeriesBaseDataset method)"""
        return self.getIdOfLastDatum()-self.__num_data_in_buffer+1

    def append(self,data):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesBaseDataset method)"""
        data=numarray.array(data)
        nc=self.getNumComponents()
        if data.rank==0:
            if nc==1:
                num_new_data=1
            else:
                raise ValueError,"%s: illegal data shape"%self
        elif data.rank==1:
            if nc==1:
                num_new_data=data.shape[0]
            else:
                num_new_data=1
        elif data.rank==2:
            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
            num_new_data=data.shape[0]
        else:
            raise ValueError,"%s: illegal rank"%self

        # check if the buffer would overflow when the data are appended:
        if self.needsRearrangement(num_new_data):
            nn=self.getNumDataInBaseBuffer()
            num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnreferencedDatum()
            if num_protected_data+num_new_data>self.getBaseBufferSize():
                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
            if num_protected_data>0: self.getBaseBuffer()[0:num_protected_data]=self.getBaseBuffer()[nn-num_protected_data:nn]
            self.__num_data_in_buffer=num_protected_data
            self.__id_last_unreferenced_datum=self.__id_last_datum
            if self.debug():
                print "Debug: %s: rearrangement: first data in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBaseBuffer()+1)
        # copy data over:
        nn=self.getNumDataInBaseBuffer()
        self.getBaseBuffer()[nn:nn+num_new_data]=data
        self.__num_data_in_buffer+=num_new_data
        self.__id_last_datum+=num_new_data
        self.__id_last_unreferenced_datum+=num_new_data
        if self.debug(): print "Debug: %s: %d data appended. Last unreferenced datum is now %d."%(self,num_new_data,self.__id_last_unreferenced_datum)

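# The following helper is an illustrative sketch and not part of the original module API:
# it shows how a TimeSeriesBaseBuffer and two offset TimeSeriesBaseDataset views refer to
# the same data under different datum identification numbers. It mirrors the checks in the
# test section at the bottom of this file and is never called on import.
def _example_buffer_views():
    """sketch: a buffer of size 5 seen through views with offsets +1 and -1"""
    bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1)
    view_l=TimeSeriesBaseDataset(bf,offset=1,description="offset 1")
    view_r=TimeSeriesBaseDataset(bf,offset=-1,description="offset -1")
    bf.append([1.,2.,3.,4.])
    # the datum with id 1 in the buffer has id 0 in the +1 view and id 2 in the -1 view:
    return view_l[0],bf[1],view_r[2]   # all three values are 2.
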
# ======================================
class TimeSeriesControlerView(TimeSeriesBase):
    """A TimeSeriesControlerView is attached to a Controler and moves forward in time by increasing the id of the last processed datum.
       Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
    def __init__(self,id_first_datum=0,debug=False,description="TimeSeries"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__id_last_processed_datum=id_first_datum-1
        if self.debug(): print "Debug: %s created with first datum %d"%(str(self),id_first_datum)

    def getIdOfLastProcessedDatum(self):
        return self.__id_last_processed_datum

    def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
        self.__id_last_processed_datum=id_last_processed_datum

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

class TimeSeries(TimeSeriesBaseDataset,TimeSeriesControlerView):
    """makes a TimeSeriesBaseDataset look like a TimeSeries and introduces operations
       Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
    def __init__(self,dataset,debug=False,description="TimeSeries"):
        TimeSeriesControlerView.__init__(self,dataset.getIdOfFirstDatum(),debug,description)
        TimeSeriesBaseDataset.__init__(self,dataset,0,debug,description)

    def getDataset(self):
        """returns the TimeSeriesBaseDataset of the time series"""
        return self.getBaseBuffer()

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

    def __add__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesAdd(self,arg)
        else:
            return TimeSeriesAddScalar(self,arg)

    def __sub__(self,arg):
        return self+(-1.)*arg

    def __mul__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesMult(self,arg)
        else:
            return TimeSeriesMultScalar(self,arg)

    def __div__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(self,arg)
        else:
            return TimeSeriesMultScalar(self,1./arg)

    def __pow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(self,arg)
        else:
            return TimeSeriesPowerScalar(self,arg)

    def __radd__(self,arg):
        return self.__add__(arg)

    def __rsub__(self,arg):
        return arg+(-1.)*self

    def __rmul__(self,arg):
        return self.__mul__(arg)

    def __rdiv__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(arg,self)
        else:
            return TimeSeriesDivScalar(self,arg)

    def __rpow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(arg,self)
        else:
            return Exp(numarray.log(arg)*self)

    def __lshift__(self,arg):
        return TimeSeriesShift(self,-arg)

    def __rshift__(self,arg):
        return TimeSeriesShift(self,arg)

    def __neg__(self):
        return (-1.0)*self

    def __pos__(self):
        return (1.0)*self

class TimeSeriesOperator(TimeSeriesControlerView):
    """a TimeSeriesOperator describes an operation acting on a list of TimeSeries time_series_args. It allows updating its output (if there is any)
       through the update method which is overwritten by a particular implementation of the class. The update method is called to process the data [start:end] using
       [start-left_wing_size:end+right_wing_size] of its arguments"""
    def __init__(self,controler,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesOperator"):
        id_first_datum=controler.getIdOfFirstDatum()
        for i in time_series_args: id_first_datum=max(id_first_datum,i.getIdOfFirstDatum())
        TimeSeriesControlerView.__init__(self,id_first_datum+left_wing_size,debug,description)
        self.__left_wing_size=left_wing_size
        self.__right_wing_size=right_wing_size
        self.__time_series_args=time_series_args
        self.__controler=controler
        controler.appendOperatorToUpdateList(self)
        if self.debug(): print "Debug: %s: with left/right wing size %d/%d and %d arguments."%(str(self),left_wing_size,right_wing_size,len(time_series_args))

    def __del__(self):
        self.getControler().removeOperatorFromUpdateList(self)

    def getControler(self):
        """returns the Controler updating the TimeSeriesOperator"""
        return self.__controler

    def getLeftWingSize(self):
        """returns the left wing size"""
        return self.__left_wing_size

    def getRightWingSize(self):
        """returns the right wing size"""
        return self.__right_wing_size

    def getArguments(self,index=None):
        """returns the list of arguments or, if index is present, the argument with index index. In the latter case None is returned if no arguments are present"""
        if index==None:
            return self.__time_series_args
        else:
            if len(self.__time_series_args)>0:
                return self.__time_series_args[index]
            else:
                return None

    def getArgumentDataset(self,index):
        """returns the dataset of the argument with index index"""
        arg=self.getArguments(index)
        if arg==None:
            return None
        else:
            return self.getArguments(index).getDataset()

    def flush(self):
        """calls the update method over the maximum processable range. It also updates the id of the last unreferenced datum for all arguments"""
        start=self.getIdOfLastProcessedDatum()+1
        end=self.getControler().getIdOfLastDatum()
        for i in self.getArguments(): end=min(end,i.getIdOfLastDatum())
        if start<=end-self.getRightWingSize():
            if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
            self.update(start,end-self.getRightWingSize()+1)
            for i in self.getArguments(): i.updateIdOfLastUnreferencedDatum(end-self.getLeftWingSize())
            self.updateIdOfLastProcessedDatum(end)

    def update(self,start,end):
        """updates the data [start:end] using [start-left_wing_size:end+right_wing_size] of its arguments (is overwritten by a particular TimeSeriesOperator)"""
        pass

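# Illustrative sketch and not part of the original module: a minimal sink operator in the
# style of the Writer class further below. It keeps the largest value seen so far in a plain
# attribute; the class name and attribute are made up for this example, and a series with a
# single component is assumed.
class _ExampleMaxTracker(TimeSeriesOperator):
    """tracks the maximum value of a TimeSeries seen so far (example only)"""
    def __init__(self,time_serie):
        TimeSeriesOperator.__init__(self,time_serie.getControler(),[time_serie],0,0,time_serie.debug(),"max(%s)"%time_serie)
        self.max_value=None

    def update(self,start,end):
        # the controler calls this through flush() with the range of newly available data:
        chunk=self.getArgumentDataset(0)[start:end]
        m=max(chunk)
        if self.max_value==None or m>self.max_value: self.max_value=m
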
class TimeSeriesFilter(TimeSeries,TimeSeriesOperator):
    """a TimeSeriesFilter is a TimeSeries that is created through a TimeSeriesOperator"""
    def __init__(self,controler,dataset,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesFilter"):
        TimeSeriesOperator.__init__(self,controler,time_series_args,left_wing_size,right_wing_size,debug,description)
        TimeSeries.__init__(self,dataset,debug,description)

    def update(self,start,end):
        """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
        nc=self.getNumComponents()
        if nc>1:
            self.getDataset().append(numarray.zeros([end-start,nc]))
        else:
            self.getDataset().append(numarray.zeros(end-start))

class Controler(TimeSeries):
    """controls a set of TimeSeries"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="TimeSeriesControler"):
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"node buffer of "+description),debug,"nodes of "+description)
        self.setFlushRate()
        self.__update_time_series=list()

    def __del__(self):
        self.flush()

    def getControler(self):
        """returns the Controler of the time series (overwrites method of TimeSeries)"""
        return self

    def setFlushRate(self,rate=50):
        """sets the flush rate, i.e. after rate new time nodes have been checked in, the flush method is called."""
        self.__flush_rate=rate
        if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)

    def needsFlushing(self):
        """returns True if the dependent TimeSeriesFilters need to be flushed because the time nodes buffer is full or because of the set flush rate"""
        return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0

    def flush(self):
        """flushes all dependent TimeSeriesFilters by processing their flush method"""
        if self.debug(): print "Debug: %s: start flushing"%self
        for time_serie in self.__update_time_series: time_serie.flush()

    def appendOperatorToUpdateList(self,time_serie):
        if not time_serie.getControler()==self: raise ValueError,"%s: TimeSeries %s is not defined on this controler."%(self,time_serie)
        if not self.isEmpty(): raise ValueError,"%s: you can only check in a time series while the controler is empty."%self
        self.__update_time_series.append(time_serie)
        if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)

    def removeOperatorFromUpdateList(self,time_serie):
        self.__update_time_series.remove(time_serie)
        if self.debug(): print "Debug: %s: %s has been removed from update list."%(self,time_serie)

    def nextTime(self,value):
        if self.needsFlushing(): self.flush()
        self.getDataset().append(value)
        if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)

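# Illustrative sketch and not part of the original module: a minimal driver loop showing how
# a Controler, the arithmetic operators and flush() work together. It mirrors the operator
# test in the __main__ section at the bottom of the file; the time values and the buffer
# size are arbitrary choices, and the function is never called on import.
def _example_driver_loop():
    """sketch: feed time nodes into a Controler and read back a derived series"""
    c=Controler(buffer_size=15)
    shifted=c<<1              # view of the next time node, i.e. shifted[i]=t[i+1]
    total=c+shifted           # a TimeSeriesAdd filter updated whenever c is flushed
    for i in range(30):
        c.nextTime(i*1.)      # flushes automatically when the node buffer fills up
    c.flush()                 # process whatever is left over
    return total[28]          # 28.+29. for the time nodes used above
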
class TimeSeriesShift(TimeSeries):
    """creates a shift of the time series, i.e. if v[n] is the datum at time t[n], the datum at t[n+shift] of the output is v[n]"""
    def __init__(self,time_serie,shift=1):
        if shift<0:
            dsc="(%s)<<%d"%(time_serie,-shift)
        else:
            dsc="(%s)>>%d"%(time_serie,shift)
        self.__controler=time_serie.getControler()
        TimeSeries.__init__(self,TimeSeriesBaseDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),time_serie.debug(),dsc)

    def getControler(self):
        return self.__controler

class TimeSeriesAdd(TimeSeriesFilter):
    """adds two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesAdd: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])

class TimeSeriesAddScalar(TimeSeriesFilter):
    """adds a single value to a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)+(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)

class TimeSeriesMult(TimeSeriesFilter):
    """multiplies two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])

class TimeSeriesMultScalar(TimeSeriesFilter):
    """multiplies a TimeSeries with a single value"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)*%s"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)

class TimeSeriesDiv(TimeSeriesFilter):
    """divides two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])

class TimeSeriesDivScalar(TimeSeriesFilter):
    """divides a scalar by a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)/(%s)"%(scalar,time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.__scalar/self.getArgumentDataset(0)[start:end])

class TimeSeriesPower(TimeSeriesFilter):
    """raises one TimeSeries to the power of another TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])

class TimeSeriesPowerScalar(TimeSeriesFilter):
    """raises a TimeSeries to the power of a scalar"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)**(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)

class Exp(TimeSeriesFilter):
    """applies the exponential function to a TimeSeries"""
    def __init__(self,time_serie):
        dsc="exp(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))

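# Illustrative sketch and not part of the original module: a user-defined pointwise filter
# following the same pattern as the Exp class above. The class name and the use of
# numarray.sin are arbitrary choices; any numarray function applied datum by datum fits the
# same scheme.
class _ExampleSin(TimeSeriesFilter):
    """applies the sine function to a TimeSeries (example only)"""
    def __init__(self,time_serie):
        dsc="sin(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(numarray.sin(self.getArgumentDataset(0)[start:end]))
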
class Writer(TimeSeriesOperator):
    """writes the time series into an output stream ostream which must have the writelines method. The values are separated by the string seperator."""
    def __init__(self,time_serie,ostream,seperator=",",commend_tag="#"):
        dsc="write %s to %s"%(time_serie,ostream)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        self.__ostream=ostream
        self.__seperator=seperator
        TimeSeriesOperator.__init__(self,cntrl,[time_serie],0,0,dbg,dsc)
        ostream.writelines("%s time series %s\n"%(commend_tag,str(self)))

    def update(self,start,end):
        cntrl=self.getControler()
        arg=self.getArguments(0)
        n=arg.getNumComponents()
        if n<2:
            for i in range(start,end): self.__ostream.writelines("%s%s%s\n"%(cntrl[i],self.__seperator,arg[i]))
        else:
            for i in range(start,end):
                l="%s"%cntrl[i]
                for j in range(n): l=l+"%s%s"%(self.__seperator,arg[i][j])
                self.__ostream.writelines("%s\n"%l)

class DataCatcher(TimeSeries):
    """collects data into a time series."""
    def __init__(self,controler,numComponents=1,description="DataCatcher"):
        self.__controler=controler
        dbg=controler.debug()
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(controler.getBaseBufferSize(),numComponents,DEFAULT_FLOAT_TYPE,controler.getIdOfFirstDatum(),dbg,"buffer for "+description),dbg,description)

    def getControler(self):
        return self.__controler

    def nextValue(self,value):
        """appends a value to the time series"""
        id_last=self.getIdOfLastDatum()
        id_current=self.getControler().getIdOfLastDatum()
        if id_last+1==id_current:
            self.getDataset().append(value)
        elif id_last+1<id_current:
            if self.isEmpty():
                self.getDataset().append(value)
                id_last+=1
            # interpolate linearly between the last caught value and the new value
            # for the time nodes which have been skipped:
            t_last=self.getControler()[id_last]
            t_current=self.getControler()[id_current]
            value_last=self[id_last]
            out=(value_last-value)/(t_last-t_current)*(self.getControler()[id_last+1:id_current+1]-t_current)+value
            self.getDataset().append(out)
        else:
            raise ValueError,"%s: a new time node must be introduced before a new value can be added."%self
        self.updateIdOfLastUnreferencedDatum(id_last)


class TimeSeriesCumulativeSum(TimeSeriesFilter):
    """cumulative sum of the time series values"""
    def __init__(self,time_serie):
        dsc="cumsum(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__last_value=0

    def update(self,start,end):
        out=numarray.cumsum(self.getArgumentDataset(0)[start:end])+self.__last_value
        self.__last_value=out[end-start-1]
        self.append(out)


class Reader(TimeSeriesBase):
    """reads a list of input streams and creates a time series for each input stream, all on the same Controler, where the first column
       is used to create the time nodes"""
    def __init__(self,list_of_istreams,buffer_size=DEFAULT_BUFFER_SIZE,seperator=",",commend_tag="#",debug=False):
        TimeSeriesBase.__init__(self,debug=debug,description="reader")
        if not isinstance(list_of_istreams,list):
            self.__list_of_istreams=[list_of_istreams]
        else:
            self.__list_of_istreams=list_of_istreams
        self.__cntrl=Controler(buffer_size,debug,"reader controler")
        self.__seperator=seperator
        self.__commend_tag=commend_tag
        self.__time_series={}
        self.__t={}
        self.__v={}
        # set up the time series:
        for i in self.__list_of_istreams:
            line=self.__commend_tag
            while not line=="" and line[0]==self.__commend_tag:
                line=i.readline().strip()
            if line=="":
                list_of_istreams.remove(i)
            else:
                d=line.split(self.__seperator)
                self.__t[i]=float(d[0])
                tmp=[]
                for j in d[1:]: tmp.append(float(j))
                self.__v[i]=numarray.array(tmp)
                self.__time_series[i]=DataCatcher(self.__cntrl,len(d)-1,str(i))

    #
    def run(self):
        while len(self.__list_of_istreams)>0:
            if len(self.__time_series)>0:
                # find all streams whose next time node is the minimum time node:
                tminargs=[]
                for i in self.__time_series:
                    if len(tminargs)==0:
                        tminargs.append(i)
                    elif abs(self.__t[tminargs[0]]-self.__t[i])<1.e-8*abs(self.__t[i]):
                        tminargs.append(i)
                    elif self.__t[i]<self.__t[tminargs[0]]:
                        tminargs=[i]
                # introduce the minimum time node and catch the corresponding values:
                self.__cntrl.nextTime(self.__t[tminargs[0]])
                for i in tminargs:
                    self.__time_series[i].nextValue(self.__v[i])
                    # find next line without leading "#"
                    line="#"
                    while not line=="" and line[0]==self.__commend_tag:
                        line=i.readline().strip()
                    # if eof is reached the istream is removed from the search
                    if line=="":
                        self.__list_of_istreams.remove(i)
                    else:
                        d=line.split(self.__seperator)
                        self.__t[i]=float(d[0])
                        tmp=[]
                        for j in d[1:]: tmp.append(float(j))
                        self.__v[i]=numarray.array(tmp)

    def getControler(self):
        """returns the controler shared by all time series created through the input streams"""
        return self.__cntrl

    def getTimeSeries(self,istream=None):
        """returns the time series as a tuple. If istream is present its time series is returned"""
        if istream==None:
            out=self.__time_series.values()
            if len(out)>1:
                return tuple(out)
            elif len(out)>0:
                return out[0]
            else:
                return None
        else:
            return self.__time_series[istream]


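# Illustrative sketch and not part of the original module: reading a small comma separated
# table from an in-memory stream. The StringIO input and its values are made up for this
# example; any object with a readline method, e.g. an open file, works the same way. The
# function is never called on import.
def _example_reader():
    """sketch: build a time series from a two-column CSV stream"""
    from StringIO import StringIO
    source=StringIO("# t,v\n0.,1.\n1.,2.\n2.,4.\n")
    r=Reader(source,buffer_size=15)
    r.run()
    series=r.getTimeSeries()
    return series[2]   # the value caught for the third time node, i.e. 4.
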
class Plotter(TimeSeriesOperator):
    """plots one or more TimeSeries over a moving time window using the gnuplot renderer of pyvisi and saves the result to file_name"""
    def __init__(self,time_series,window_size=DEFAULT_BUFFER_SIZE/4,file_name=None,format=None):
        if isinstance(time_series,list):
            dbg=time_series[0].getControler().debug()
            text=""
            for i in time_series:
                if len(text)==0:
                    text=str(i)
                else:
                    text=text+","+str(i)
            TimeSeriesOperator.__init__(self,time_series[0].getControler(),time_series,window_size,0,dbg,"plot(%s)"%text)
        else:
            dbg=time_series.getControler().debug()
            text=str(time_series)
            TimeSeriesOperator.__init__(self,time_series.getControler(),[time_series],window_size,0,dbg,"plot(%s)"%text)
        from pyvisi.renderers.gnuplot import LinePlot,Scene,PsImage
        self.__renderer=Scene()
        self.__line_plot=LinePlot(self.__renderer)
        self.__line_plot.setTitle(text)
        self.__line_plot.setLineStyle("lines")
        self.__line_plot.setXLabel("time")
        self.__line_plot.setYLabel("values")
        self.__file_name=file_name
        if format==None:
            self.__format=PsImage()
        else:
            self.__format=format
        self.__window_size=window_size

    def update(self,start,end):
        s=max(end-self.__window_size,self.getControler().getIdOfFirstAvailableDatum())
        args=[self.getControler()[s:end]]
        for arg in self.getArguments(): args.append(arg[s:end])
        self.__line_plot.setData(*args)
        self.__line_plot.render()
        if self.__file_name==None:
            raise SystemError,"Online viewing is not available yet!"
        else:
            self.__renderer.save(fname=self.__file_name, format=self.__format)


def viewer(time_serie,seperator=","):
    """creates a viewer for a time series"""
    import sys
    return Writer(time_serie,sys.stdout,seperator)

def differential(time_serie):
    """calculates the derivative Dv of the time series v as the average of the forward and backward difference quotients:

       Dv[n]=( (v[n+1]-v[n])/(t[n+1]-t[n]) + (v[n-1]-v[n])/(t[n-1]-t[n]) )/2.

    """
    out=(((time_serie<<1)-time_serie)/((time_serie.getControler()<<1)-time_serie.getControler())+ \
         ((time_serie>>1)-time_serie)/((time_serie.getControler()>>1)-time_serie.getControler()))/2.
    out.setDescription("d(%s)/dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def integral(time_serie):
    """calculates the integral Iv of the time series v using the trapezoidal rule:

       Iv[n]=int_{t_0}^{t_n} v ~ sum_{0<i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])

    """
    out=TimeSeriesCumulativeSum(((time_serie>>1)+time_serie)/2.*(time_serie.getControler()-(time_serie.getControler()>>1)))
    out.setDescription("I (%s) dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def smooth(time_serie,range=5):
    """smoothes a time series using, at each time node, the previous and next range values"""
    i=integral(time_serie)
    out=((i>>range)-(i<<range))/((time_serie.getControler()>>range)-(time_serie.getControler()<<range))
    out.setDescription("smooth(%s,-%d:%d) dt"%(str(time_serie),range,range))
    out.setDebug(time_serie.debug())
    return out

def leakySmooth(time_serie,l=0.99):
    """leaky smoother: s(t)=int_{t_0}^{t} v(r) l^{t-r} dr/ int_{t_0}^{t} l^{t-r} dr """
    w=l**(-time_serie.getControler())
    out=integral(time_serie*w)/integral(w)
    out.setDescription("leaky smoother(%s)"%str(time_serie))
    return out

# test

if __name__=="__main__":
    # tests the interfaces to data sets:
    print "Test of Datasets:"
    print "================="
    bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBaseBuffer")
    bfv_l=TimeSeriesBaseDataset(bf,offset=1,debug=True,description="offset 1")
    bfv_r=TimeSeriesBaseDataset(bf,offset=-1,debug=True,description="offset -1")
    bf.append([1.,2.,3.,4.])
    print "should be all 2. :",bfv_l[0]
    print bf[1]
    print bfv_r[2]
    bf.append([5.,6.,7.])
    print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
    print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
    print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
    print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]

    print "Test of Controler"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    s3=b>>3
    s1=b>>1
    s_3=b<<3
    print s_3
    print b
    print b+s3
    sum=(s_3+b)+(b+s3)

    for i in range(30):
        b.nextTime(i*1.)
    b.flush()
    print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
    print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
    print "should be all 96. :",sum.getDataset()[24]

    print "Test of operators"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    b.setFlushRate(2)
    q=DataCatcher(b)
    b1=b<<1
    a=b+b1
    a_s=b1+1.
    s_a=1.+b1
    d=b-b1
    d_s=b1-1.
    s_d=1.-b1
    m=b*b1
    m_s=b1*2.
    s_m=2.*b1
    dv=b/b1
    dv_s=b1/2.
    s_dv=2./b1
    p=b**b1
    p_s=b1**2.
    s_p=2.**b1
    pb=+b
    mb=-b
    sum=TimeSeriesCumulativeSum(b)
    diff=differential(b)
    smt=smooth(b,2)
    int=integral(b*2)
    fl=file("/tmp/test.csv","w")
    w=Writer(q,fl)
    v=viewer(q)
    plo=Plotter([a,a_s],window_size=4,file_name="s.ps")
    for i in range(30):
        b.nextTime(i*1.)
        if i%2==1: q.nextValue(i*28.)
    b.flush()
    print "a[28] should be %e: %e"%(28.+29.,a[28])
    print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
    print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
    print "d[28] should be %e: %e"%(28.-29.,d[28])
    print "d_s[28] should be %e: %e"%(29.-1.,d_s[28])
    print "s_d[28] should be %e: %e"%(1.-29.,s_d[28])
    print "m[28] should be %e: %e"%(28.*29.,m[28])
    print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
    print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
    print "dv[28] should be %e: %e"%(28./29.,dv[28])
    print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
    print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
    print "p[28] should be %e: %e"%(28.**29.,p[28])
    print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
    print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
    print "pb[28] should be %e: %e"%(28.,pb[28])
    print "mb[28] should be %e: %e"%(-28.,mb[28])
    print "sum[28] should be %e: %e"%(28*29./2,sum[28])
    print "diff[28] should be %e: %e"%(1.,diff[28])
    print "smt[27] should be %e: %e"%(27.,smt[27])
    print "int[28] should be %e: %e"%(28.**2,int[28])
    print "q[27] should be %e: %e"%(27*28.,q[27])
    print "q[28] should be %e: %e"%(28*28.,q[28])
    print "q[29] should be %e: %e"%(29*28.,q[29])
    fl.flush()

    rin=Reader(file("/tmp/test.csv","r+"),buffer_size=15,debug=True)
    rin.run()
    inp=rin.getTimeSeries()
    print "inp[27] should be %e: %e"%(27*28.,inp[27])
    print "inp[28] should be %e: %e"%(28*28.,inp[28])
    print "inp[29] should be %e: %e"%(29*28.,inp[29])
|