# $Id$

import numarray
from types import SliceType
DEFAULT_BUFFER_SIZE=1000
DEFAULT_FLOAT_TYPE=numarray.Float64

class TimeSeriesBase:
    """The TimeSeriesBase class is the base class for all classes of the TimeSeries module."""

    def __init__(self,debug=False,description="TimeSeriesBase"):
        self.__debug=debug
        self.setDescription(description)

    def __str__(self):
        return self.__description

    def setDescription(self,text):
        self.__description=text

    def setDebugOn(self):
        """switches debugging mode on"""
        self.__debug=True

    def setDebugOff(self):
        """switches debugging mode off"""
        self.__debug=False

    def setDebug(self,flag=False):
        """sets debug mode to flag"""
        if flag:
            self.setDebugOn()
        else:
            self.setDebugOff()

    def debug(self):
        """returns True if debug mode is on"""
        return self.__debug

#============================================================================================================
class TimeSeriesBaseDataset(TimeSeriesBase):
    """provides an interface for accessing a set of linearly ordered data."""
    def __init__(self,buffer,offset=0,debug=False,description="TimeSeriesDataset"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__buffer=buffer
        self.__offset=offset
        if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())

    def __len__(self):
        """needed to handle negative indexing in slicing"""
        return 0

    def getNumComponents(self):
        """returns the number of components of the data (may be overwritten by subclass)"""
        return self.getBaseBuffer().getNumComponents()

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastDatum()-self.getOffset()

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstDatum()-self.getOffset()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstAvailableDatum()-self.getOffset()

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in getBaseBuffer() (may be overwritten by subclass)"""
        return self.getOffset()

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastUnreferencedDatum()-self.getOffset()

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (to be overwritten by subclass)"""
        self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum+self.getOffset())

    def append(self,values):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (to be overwritten by subclass)"""
        self.getBaseBuffer().append(values)

    def getBaseBufferSize(self):
        """returns the size of the buffer (to be overwritten by subclass)"""
        return self.getBaseBuffer().getBaseBufferSize()

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
        return self.getBaseBuffer().needsRearrangement(num_new_data)

    def isEmpty(self):
        """returns True if no data have been appended to the buffer"""
        return self.getNumData()<=0

    def getNumData(self):
        """returns the number of data (not all of them are accessible)"""
        return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1

    def getBaseBuffer(self):
        """returns the buffer referenced by the TimeSeriesBaseDataset"""
        return self.__buffer

    def getOffset(self):
        """returns the offset used when referring to dataset elements"""
        return self.__offset

    def __getitem__(self,index):
        """returns the datum index"""
        if type(index)==SliceType:
            start=index.start
            end=index.stop
            if start==end:
                return self[start]
            else:
                if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
                   end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum():
                    raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
                return self.getBaseBuffer()[start+self.getOffsetInBaseBuffer():end+self.getOffsetInBaseBuffer()]
        else:
            if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum():
                raise IndexError,"%s: Index %d out of range"%(self,index)
            return self.getBaseBuffer()[index+self.getOffsetInBaseBuffer()]
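
# Illustrative note (not part of the module API): a TimeSeriesBaseDataset view
# with offset k maps its index i onto element i+k of the underlying buffer, e.g.
#
#    base=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1)
#    base.append([1.,2.,3.,4.])
#    view=TimeSeriesBaseDataset(base,offset=1)
#    # view[0] now refers to the same datum as base[1] (here 2.)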

class TimeSeriesBaseBuffer(TimeSeriesBaseDataset):
    """An implementation of TimeSeriesBaseDataset which actually stores the data in a numarray buffer"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="TimeSeriesBaseBuffer"):
        if numComponents<2:
            buffer=numarray.zeros((buffer_size,),type)
        else:
            buffer=numarray.zeros((buffer_size,numComponents),type)
        TimeSeriesBaseDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
        self.__num_data_in_buffer=0
        self.__id_last_unreferenced_datum=id_of_first_datum-1
        self.__id_last_datum=id_of_first_datum-1
        self.__id_first_datum=id_of_first_datum
        if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
                               (self,self.getBaseBufferSize(),self.getNumComponents(),id_of_first_datum)

    def getBaseBufferSize(self):
        """returns the size of the buffer"""
        return self.getBaseBuffer().shape[0]

    def getNumComponents(self):
        """returns the number of components of the data (overwrites TimeSeriesBaseDataset method)"""
        if self.getBaseBuffer().rank==1:
            return 1
        else:
            return self.getBaseBuffer().shape[1]

    def getNumDataInBaseBuffer(self):
        """returns the number of data currently in the buffer"""
        return self.__num_data_in_buffer

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_datum

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_first_datum

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in the buffer (overwrites method from TimeSeriesBaseDataset)"""
        return -self.getIdOfLastDatum()+self.getNumDataInBaseBuffer()-1

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_unreferenced_datum

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (overwrites TimeSeriesBaseDataset method)"""
        if self.__id_last_unreferenced_datum>last_unreferenced_datum:
            self.__id_last_unreferenced_datum=last_unreferenced_datum
            if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unreferenced_datum)

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended"""
        return self.getNumDataInBaseBuffer()+num_new_data>self.getBaseBufferSize()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (overwrites TimeSeriesBaseDataset method)"""
        return self.getIdOfLastDatum()-self.__num_data_in_buffer+1

    def append(self,data):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesBaseDataset method)"""
        data=numarray.array(data)
        nc=self.getNumComponents()
        if data.rank==0:
            if nc==1:
                num_new_data=1
            else:
                raise ValueError,"%s: illegal data shape"%self
        elif data.rank==1:
            if nc==1:
                num_new_data=data.shape[0]
            else:
                num_new_data=1
        elif data.rank==2:
            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
            num_new_data=data.shape[0]
        else:
            raise ValueError,"%s: illegal rank"%self

        # check if the buffer would overflow when the data are appended:
        if self.needsRearrangement(num_new_data):
            nn=self.getNumDataInBaseBuffer()
            num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnreferencedDatum()
            if num_protected_data+num_new_data>self.getBaseBufferSize():
                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
            if num_protected_data>0: self.getBaseBuffer()[0:num_protected_data]=self.getBaseBuffer()[nn-num_protected_data:nn]
            self.__num_data_in_buffer=num_protected_data
            self.__id_last_unreferenced_datum=self.__id_last_datum
            if self.debug():
                print "Debug: %s: rearrangement: first datum in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBaseBuffer()+1)
        # copy data over:
        nn=self.getNumDataInBaseBuffer()
        self.getBaseBuffer()[nn:nn+num_new_data]=data
        self.__num_data_in_buffer+=num_new_data
        self.__id_last_datum+=num_new_data
        self.__id_last_unreferenced_datum+=num_new_data
        if self.debug(): print "Debug: %s: %d data appended. Last unreferenced datum is now %d."%(self,num_new_data,self.__id_last_unreferenced_datum)
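
# Note on the rearrangement above (descriptive only): when an append would
# overflow the buffer, all data up to the last unreferenced datum are dropped,
# the remaining (protected) data are copied to the front of the numarray buffer,
# and only then are the new values written; if the protected data plus the new
# data still exceed the buffer size a ValueError is raised.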

# ======================================
class TimeSeriesControlerView(TimeSeriesBase):
    """A TimeSeriesControlerView is attached to a Controler and moves forward in time by increasing the id of the last processed datum.
    Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler."""
    def __init__(self,id_first_datum=0,debug=False,description="TimeSeries"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__id_last_processed_datum=id_first_datum-1
        if self.debug(): print "Debug: %s created with first datum %d"%(str(self),id_first_datum)

    def getIdOfLastProcessedDatum(self):
        return self.__id_last_processed_datum

    def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
        self.__id_last_processed_datum=id_last_processed_datum

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

class TimeSeries(TimeSeriesBaseDataset,TimeSeriesControlerView):
    """makes a TimeSeriesBaseDataset look like a TimeSeries and introduces arithmetic operations.
    Any implementation of a TimeSeries must provide the getControler method which returns the controler."""
    def __init__(self,dataset,debug=False,description="TimeSeries"):
        TimeSeriesControlerView.__init__(self,dataset.getIdOfFirstDatum(),debug,description)
        TimeSeriesBaseDataset.__init__(self,dataset,0,debug,description)

    def getDataset(self):
        """returns the TimeSeriesBaseDataset of the time series"""
        return self.getBaseBuffer()

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

    def __add__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesAdd(self,arg)
        else:
            return TimeSeriesAddScalar(self,arg)

    def __sub__(self,arg):
        return self+(-1.)*arg

    def __mul__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesMult(self,arg)
        else:
            return TimeSeriesMultScalar(self,arg)

    def __div__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(self,arg)
        else:
            return TimeSeriesMultScalar(self,1./arg)

    def __pow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(self,arg)
        else:
            return TimeSeriesPowerScalar(self,arg)

    def __radd__(self,arg):
        return self.__add__(arg)

    def __rsub__(self,arg):
        return arg+(-1.)*self

    def __rmul__(self,arg):
        return self.__mul__(arg)

    def __rdiv__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(arg,self)
        else:
            return TimeSeriesDivScalar(self,arg)

    def __rpow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(arg,self)
        else:
            return Exp(numarray.log(arg)*self)

    def __lshift__(self,arg):
        return TimeSeriesShift(self,-arg)

    def __rshift__(self,arg):
        return TimeSeriesShift(self,arg)

    def __neg__(self):
        return (-1.0)*self

    def __pos__(self):
        return (1.0)*self
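
# Illustrative note (not part of the module): the operators above do not compute
# anything immediately; they build filter objects (TimeSeriesAdd, TimeSeriesMultScalar,
# ...) that are registered with the controler and filled when it flushes, e.g.
#
#    c=Controler(buffer_size=15)
#    shifted=c<<1
#    s=c+shifted                      # a TimeSeriesAdd registered with c
#    for i in range(30): c.nextTime(i*1.)
#    c.flush()                        # s now holds the sums over the processed range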

class TimeSeriesOperator(TimeSeriesControlerView):
    """a TimeSeriesOperator describes an operation acting on a list of TimeSeries time_series_args. It allows updating its output (if there is any)
    through the update method, which is overwritten by a particular implementation of the class. The update method is called to process the data [start:end] using
    [start-left_wing_size:end+right_wing_size] of its arguments."""
    def __init__(self,controler,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesOperator"):
        id_first_datum=controler.getIdOfFirstDatum()
        for i in time_series_args: id_first_datum=max(id_first_datum,i.getIdOfFirstDatum())
        TimeSeriesControlerView.__init__(self,id_first_datum+left_wing_size,debug,description)
        self.__left_wing_size=left_wing_size
        self.__right_wing_size=right_wing_size
        self.__time_series_args=time_series_args
        self.__controler=controler
        controler.appendOperatorToUpdateList(self)
        if self.debug(): print "Debug: %s: with left/right wing size %d/%d and %d arguments."%(str(self),left_wing_size,right_wing_size,len(time_series_args))

    def __del__(self):
        self.getControler().removeOperatorFromUpdateList(self)

    def getControler(self):
        """returns the Controler updating the TimeSeriesOperator"""
        return self.__controler

    def getLeftWingSize(self):
        """returns the left wing size"""
        return self.__left_wing_size

    def getRightWingSize(self):
        """returns the right wing size"""
        return self.__right_wing_size

    def getArguments(self,index=None):
        """returns the list of arguments or, if index is present, the argument with index index. In the latter case None is returned if no arguments are present"""
        if index==None:
            return self.__time_series_args
        else:
            if len(self.__time_series_args)>0:
                return self.__time_series_args[index]
            else:
                return None

    def getArgumentDataset(self,index):
        """returns the dataset of the argument with index index"""
        arg=self.getArguments(index)
        if arg==None:
            return None
        else:
            return self.getArguments(index).getDataset()

    def flush(self):
        """calls the update method with the maximum processable range. It also updates the id of the last unused datum for all arguments"""
        start=self.getIdOfLastProcessedDatum()+1
        end=self.getControler().getIdOfLastDatum()
        for i in self.getArguments(): end=min(end,i.getIdOfLastDatum())
        if start<=end-self.getRightWingSize():
            if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
            self.update(start,end-self.getRightWingSize()+1)
            for i in self.getArguments(): i.updateIdOfLastUnreferencedDatum(end-self.getLeftWingSize())
            self.updateIdOfLastProcessedDatum(end)

    def update(self,start,end):
        """updates the data [start:end] using [start-left_wing_size:end+right_wing_size] of its arguments (is overwritten by a particular TimeSeriesOperator)"""
        pass

class TimeSeriesFilter(TimeSeries,TimeSeriesOperator):
    """a TimeSeriesFilter is a TimeSeries that is created through a TimeSeriesOperator"""
    def __init__(self,controler,dataset,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesFilter"):
        TimeSeriesOperator.__init__(self,controler,time_series_args,left_wing_size,right_wing_size,debug,description)
        TimeSeries.__init__(self,dataset,debug,description)

    def update(self,start,end):
        """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
        nc=self.getNumComponents()
        if nc>1:
            self.getDataset().append(numarray.zeros([end-start,nc]))
        else:
            self.getDataset().append(numarray.zeros(end-start))
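
# Illustrative sketch (not part of the module): a new filter follows the same
# pattern as the arithmetic filters below; it allocates its own TimeSeriesBaseBuffer
# and appends the processed values in update(). A filter squaring its argument
# could look like
#
#    class TimeSeriesSquare(TimeSeriesFilter):
#        def __init__(self,time_serie):
#            dsc="(%s)**2"%time_serie
#            dbg=time_serie.debug()
#            cntrl=time_serie.getControler()
#            TimeSeriesFilter.__init__(self,cntrl, \
#                      TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(), \
#                                           DEFAULT_FLOAT_TYPE,time_serie.getIdOfFirstDatum(),dbg,"buffer for "+dsc), \
#                      [time_serie],0,0,dbg,dsc)
#        def update(self,start,end):
#            self.append(self.getArgumentDataset(0)[start:end]**2)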

class Controler(TimeSeries):
    """controls a set of TimeSeries"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="TimeSeriesControler"):
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"node buffer of "+description),debug,"nodes of "+description)
        self.setFlushRate()
        self.__update_time_series=list()

    def __del__(self):
        self.flush()

    def getControler(self):
        """returns the Controler of the time series (overwrites method of TimeSeries)"""
        return self

    def setFlushRate(self,rate=50):
        """sets the flush rate, i.e. after rate new time nodes have been checked in the flush method is called."""
        self.__flush_rate=rate
        if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)

    def needsFlushing(self):
        """returns True if the dependent TimeSeriesFilters need to be flushed because the time node buffer is full or because of the set flush rate"""
        return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0

    def flush(self):
        """flushes all dependent TimeSeriesFilters by processing their flush method"""
        if self.debug(): print "Debug: %s: start flushing"%self
        for time_serie in self.__update_time_series: time_serie.flush()

    def appendOperatorToUpdateList(self,time_serie):
        if not time_serie.getControler()==self: raise ValueError,"%s: TimeSeries %s is not defined on this controler."%(self,time_serie)
        if not self.isEmpty(): raise ValueError,"%s: a time series can only be checked in while the controler is empty."%self
        self.__update_time_series.append(time_serie)
        if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)

    def removeOperatorFromUpdateList(self,time_serie):
        self.__update_time_series.remove(time_serie)
        if self.debug(): print "Debug: %s: %s has been removed from update list."%(self,time_serie)

    def nextTime(self,value):
        if self.needsFlushing(): self.flush()
        self.getDataset().append(value)
        if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)
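
# Illustrative note (not part of the module): a typical use of the Controler is
#
#    c=Controler(buffer_size=15)
#    catcher=DataCatcher(c)
#    for i in range(30):
#        c.nextTime(i*1.)          # introduce the next time node
#        catcher.nextValue(i*2.)   # attach a value to that node
#    c.flush()                     # push the data through all registered operators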

class TimeSeriesShift(TimeSeries):
    """creates a shift of the time series, i.e. if d[n] is the datum at time t[n], the shifted series returns d[n-shift] at time t[n] (so d[n] appears at time t[n+shift] on the output)"""
    def __init__(self,time_serie,shift=1):
        if shift<0:
            dsc="(%s)<<%d"%(time_serie,-shift)
        else:
            dsc="(%s)>>%d"%(time_serie,shift)
        self.__controler=time_serie.getControler()
        TimeSeries.__init__(self,TimeSeriesBaseDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),time_serie.debug(),dsc)

    def getControler(self):
        return self.__controler

class TimeSeriesAdd(TimeSeriesFilter):
    """adds two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesAdd: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])

class TimeSeriesAddScalar(TimeSeriesFilter):
    """adds a single value to a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)+(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)

class TimeSeriesMult(TimeSeriesFilter):
    """multiplies two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])

class TimeSeriesMultScalar(TimeSeriesFilter):
    """multiplies a TimeSeries by a single value"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)*%s"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)

class TimeSeriesDiv(TimeSeriesFilter):
    """divides two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])

class TimeSeriesDivScalar(TimeSeriesFilter):
    """divides a scalar by a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)/(%s)"%(scalar,time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.__scalar/self.getArgumentDataset(0)[start:end])

class TimeSeriesPower(TimeSeriesFilter):
    """raises one TimeSeries to the power of another TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])

class TimeSeriesPowerScalar(TimeSeriesFilter):
    """raises a TimeSeries to the power of a scalar"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)**(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)

class Exp(TimeSeriesFilter):
    """applies the exponential function to a TimeSeries"""
    def __init__(self,time_serie):
        dsc="exp(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))

class Writer(TimeSeriesOperator):
    """writes the time series into an output stream ostream, which must provide a writelines method. The values are separated by the string seperator."""
    def __init__(self,time_serie,ostream,seperator=",",commend_tag="#"):
        dsc="write %s to %s"%(time_serie,ostream)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        self.__ostream=ostream
        self.__seperator=seperator
        TimeSeriesOperator.__init__(self,cntrl,[time_serie],0,0,dbg,dsc)
        ostream.writelines("%s time series %s\n"%(commend_tag,str(self)))

    def update(self,start,end):
        cntrl=self.getControler()
        arg=self.getArguments(0)
        n=arg.getNumComponents()
        if n<2:
            for i in range(start,end): self.__ostream.writelines("%s%s%s\n"%(cntrl[i],self.__seperator,arg[i]))
        else:
            for i in range(start,end):
                l="%s"%cntrl[i]
                for j in range(n): l=l+"%s%s"%(self.__seperator,arg[i][j])
                self.__ostream.writelines("%s\n"%l)

class DataCatcher(TimeSeries):
    """collects data into a time series."""
    def __init__(self,controler,numComponents=1,description="DataCatcher"):
        self.__controler=controler
        dbg=controler.debug()
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(controler.getBaseBufferSize(),numComponents,DEFAULT_FLOAT_TYPE,controler.getIdOfFirstDatum(),dbg,"buffer for "+description),dbg,description)

    def getControler(self):
        return self.__controler

    def nextValue(self,value):
        """appends a value to the time series"""
        id_last=self.getIdOfLastDatum()
        id_current=self.getControler().getIdOfLastDatum()
        if id_last+1==id_current:
            self.getDataset().append(value)
        elif id_last+1<id_current:
            if self.isEmpty():
                self.getDataset().append(value)
                id_last+=1
            t_last=self.getControler()[id_last]
            t_current=self.getControler()[id_current]
            value_last=self[id_last]
            out=(value_last-value)/(t_last-t_current)*(self.getControler()[id_last+1:id_current+1]-t_current)+value
            self.getDataset().append(out)
        else:
            raise ValueError,"%s: a new time node must be introduced before a new value can be added."%self
        self.updateIdOfLastUnreferencedDatum(id_last)

class TimeSeriesCumulativeSum(TimeSeriesFilter):
    """cumulative sum of the time series values"""
    def __init__(self,time_serie):
        dsc="cumsum(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                  [time_serie],0,0,dbg,dsc)
        self.__last_value=0

    def update(self,start,end):
        out=numarray.cumsum(self.getArgumentDataset(0)[start:end])+self.__last_value
        self.__last_value=out[end-start-1]
        self.append(out)

class Reader(TimeSeriesBase):
    """reads a list of input streams and creates a time series for each input stream, all on the same Controler, where the first column
    is used to create the time nodes"""
    def __init__(self,list_of_istreams,buffer_size=DEFAULT_BUFFER_SIZE,seperator=",",commend_tag="#",debug=False):
        TimeSeriesBase.__init__(self,debug=debug,description="reader")
        if not isinstance(list_of_istreams,list):
            self.__list_of_istreams=[list_of_istreams]
        else:
            self.__list_of_istreams=list_of_istreams
        self.__cntrl=Controler(buffer_size,debug,"reader controler")
        self.__seperator=seperator
        self.__commend_tag=commend_tag
        self.__time_series={}
        self.__t={}
        self.__v={}
        # set up the time series:
        for i in self.__list_of_istreams:
            line=self.__commend_tag
            while not line=="" and line[0]==self.__commend_tag:
                line=i.readline().strip()
            if line=="":
                list_of_istreams.remove(i)
            else:
                d=line.split(self.__seperator)
                self.__t[i]=float(d[0])
                tmp=[]
                for j in d[1:]: tmp.append(float(j))
                self.__v[i]=numarray.array(tmp)
                self.__time_series[i]=DataCatcher(self.__cntrl,len(d)-1,str(i))

    def run(self):
        while len(self.__list_of_istreams)>0:
            if len(self.__time_series)>0:
                # find all streams whose next time node is the minimum time node:
                tminargs=[]
                for i in self.__time_series:
                    if len(tminargs)==0:
                        tminargs.append(i)
                    elif abs(self.__t[tminargs[0]]-self.__t[i])<1.e-8*abs(self.__t[i]):
                        tminargs.append(i)
                    elif self.__t[i]<self.__t[tminargs[0]]:
                        tminargs=[i]
                # add the minimum time node and feed the matching streams:
                self.__cntrl.nextTime(self.__t[tminargs[0]])
                for i in tminargs:
                    self.__time_series[i].nextValue(self.__v[i])
                    # find next line without leading "#"
                    line="#"
                    while not line=="" and line[0]==self.__commend_tag:
                        line=i.readline().strip()
                    # if eof is reached the stream is removed from the search
                    if line=="":
                        self.__list_of_istreams.remove(i)
                    else:
                        d=line.split(self.__seperator)
                        self.__t[i]=float(d[0])
                        tmp=[]
                        for j in d[1:]: tmp.append(float(j))
                        self.__v[i]=numarray.array(tmp)

    def getControler(self):
        """returns the controler shared by all time series created through the input streams"""
        return self.__cntrl

    def getTimeSeries(self,istream=None):
        """returns the time series as a tuple. If istream is present its time series is returned"""
        if istream==None:
            out=self.__time_series.values()
            if len(out)>1:
                return tuple(out)
            elif len(out)>0:
                return out[0]
            else:
                return None
        else:
            return self.__time_series[istream]

class Plotter(TimeSeriesOperator):
    """plots one or a list of TimeSeries using the pyvisi gnuplot renderer"""
    def __init__(self,time_series,window_size=DEFAULT_BUFFER_SIZE/4,file_name=None,format=None):
        if isinstance(time_series,list):
            dbg=time_series[0].getControler().debug()
            text=""
            for i in time_series:
                if len(text)==0:
                    text=str(i)
                else:
                    text=text+","+str(i)
            TimeSeriesOperator.__init__(self,time_series[0].getControler(),time_series,window_size,0,dbg,"plot(%s)"%text)
        else:
            dbg=time_series.getControler().debug()
            text=str(time_series)
            TimeSeriesOperator.__init__(self,time_series.getControler(),[time_series],window_size,0,dbg,"plot(%s)"%text)
        from pyvisi.renderers.gnuplot import LinePlot,Scene,PsImage
        self.__renderer=Scene()
        self.__line_plot=LinePlot(self.__renderer)
        self.__line_plot.setTitle(text)
        self.__line_plot.setLineStyle("lines")
        self.__line_plot.setXLabel("time")
        self.__line_plot.setYLabel("values")
        self.__file_name=file_name
        if format==None:
            self.__format=PsImage()
        else:
            self.__format=format
        self.__window_size=window_size

    def update(self,start,end):
        s=max(end-self.__window_size,self.getControler().getIdOfFirstAvailableDatum())
        args=[self.getControler()[s:end]]
        for arg in self.getArguments(): args.append(arg[s:end])
        self.__line_plot.setData(*args)
        self.__line_plot.render()
        if self.__file_name==None:
            raise SystemError,"Online viewing is not available yet!"
        else:
            self.__renderer.save(fname=self.__file_name, format=self.__format)


def viewer(time_serie,seperator=","):
    """creates a viewer for a time series, writing it to standard output"""
    import sys
    return Writer(time_serie,sys.stdout,seperator)

def differential(time_serie):
    """calculates the derivative Dv of the time series v as the average of the forward and backward difference quotients:

       Dv[n]=((v[n+1]-v[n])/(t[n+1]-t[n])+(v[n]-v[n-1])/(t[n]-t[n-1]))/2.

    """
    out=(((time_serie<<1)-time_serie)/((time_serie.getControler()<<1)-time_serie.getControler())+ \
         ((time_serie>>1)-time_serie)/((time_serie.getControler()>>1)-time_serie.getControler()))/2.
    out.setDescription("d(%s)/dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def integral(time_serie):
    """calculates the integral Iv of the time series v using the trapezoidal rule:

       Iv[n]=int_{t_0}^{t_n} v dt ~ sum_{0<i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])

    """
    out=TimeSeriesCumulativeSum(((time_serie>>1)+time_serie)/2.*(time_serie.getControler()-(time_serie.getControler()>>1)))
    out.setDescription("I (%s) dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def smooth(time_serie,range=5):
    """smoothes a time series using, at each time node, the previous and next range values"""
    i=integral(time_serie)
    out=((i>>range)-(i<<range))/((time_serie.getControler()>>range)-(time_serie.getControler()<<range))
    out.setDescription("smooth(%s,-%d:%d) dt"%(str(time_serie),range,range))
    out.setDebug(time_serie.debug())
    return out

def leakySmooth(time_serie,l=0.99):
    """leaky smoother: s(t)=int_{t_0}^{t} v(r) l^{t-r} dr / int_{t_0}^{t} l^{t-r} dr"""
    w=l**(-time_serie.getControler())
    out=integral(time_serie*w)/integral(w)
    out.setDescription("leaky smoother(%s)"%str(time_serie))
    return out
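
# Illustrative note (not part of the module): the helpers above return time series
# themselves, so they can be combined like any other, e.g.
#
#    c=Controler(buffer_size=15)
#    rate=differential(c)          # derivative of the time nodes, i.e. 1
#    area=integral(c*2.)           # trapezoidal rule applied to 2*t, i.e. t**2
#    for i in range(30): c.nextTime(i*1.)
#    c.flush()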

# test

if __name__=="__main__":
    # tests the interfaces to data sets:
    print "Test of Datasets:"
    print "================="
    bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBaseBuffer")
    bfv_l=TimeSeriesBaseDataset(bf,offset=1,debug=True,description="offset 1")
    bfv_r=TimeSeriesBaseDataset(bf,offset=-1,debug=True,description="offset -1")
    bf.append([1.,2.,3.,4.])
    print "should be all 2. :",bfv_l[0]
    print bf[1]
    print bfv_r[2]
    bf.append([5.,6.,7.])
    print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
    print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
    print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
    print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]

    print "Test of Controler"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    s3=b>>3
    s1=b>>1
    s_3=b<<3
    print s_3
    print b
    print b+s3
    sum=(s_3+b)+(b+s3)

    for i in range(30):
        b.nextTime(i*1.)
    b.flush()
    print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
    print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
    print "should be all 96. :",sum.getDataset()[24]

    print "Test of operators"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    b.setFlushRate(2)
    q=DataCatcher(b)
    b1=b<<1
    a=b+b1
    a_s=b1+1.
    s_a=1.+b1
    d=b-b1
    d_s=b1-1.
    s_d=1.-b1
    m=b*b1
    m_s=b1*2.
    s_m=2.*b1
    dv=b/b1
    dv_s=b1/2.
    s_dv=2./b1
    p=b**b1
    p_s=b1**2.
    s_p=2.**b1
    pb=+b
    mb=-b
    sum=TimeSeriesCumulativeSum(b)
    diff=differential(b)
    smt=smooth(b,2)
    int=integral(b*2)
    fl=file("/tmp/test.csv","w")
    w=Writer(q,fl)
    v=viewer(q)
    plo=Plotter([a,a_s],window_size=4,file_name="s.ps")
    for i in range(30):
        b.nextTime(i*1.)
        if i%2==1: q.nextValue(i*28.)
    b.flush()
    print "a[28] should be %e: %e"%(28.+29.,a[28])
    print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
    print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
    print "d[28] should be %e: %e"%(28.-29.,d[28])
    print "d_s[28] should be %e: %e"%(29.-1.,d_s[28])
    print "s_d[28] should be %e: %e"%(1.-29.,s_d[28])
    print "m[28] should be %e: %e"%(28.*29.,m[28])
    print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
    print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
    print "dv[28] should be %e: %e"%(28./29.,dv[28])
    print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
    print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
    print "p[28] should be %e: %e"%(28.**29.,p[28])
    print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
    print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
    print "pb[28] should be %e: %e"%(28.,pb[28])
    print "mb[28] should be %e: %e"%(-28.,mb[28])
    print "sum[28] should be %e: %e"%(28*29./2,sum[28])
    print "diff[28] should be %e: %e"%(1.,diff[28])
    print "smt[27] should be %e: %e"%(27.,smt[27])
    print "int[28] should be %e: %e"%(28.**2,int[28])
    print "q[27] should be %e: %e"%(27*28.,q[27])
    print "q[28] should be %e: %e"%(28*28.,q[28])
    print "q[29] should be %e: %e"%(29*28.,q[29])
    fl.flush()

    rin=Reader(file("/tmp/test.csv","r+"),buffer_size=15,debug=True)
    rin.run()
    inp=rin.getTimeSeries()
    print "inp[27] should be %e: %e"%(27*28.,inp[27])
    print "inp[28] should be %e: %e"%(28*28.,inp[28])
    print "inp[29] should be %e: %e"%(29*28.,inp[29])