# $Id$

import numarray
from types import SliceType

DEFAULT_BUFFER_SIZE=9
DEFAULT_FLOAT_TYPE=numarray.Float64

class TimeSeriesBase:
    """The TimeSeriesBase class is the base class for all classes of the TimeSeries module."""

    def __init__(self,debug=False,description="timeseries.Base"):
        self.__debug=debug
        self.__description=description

    def __str__(self):
        return self.__description

    def setDebugOn(self):
        """switches on debugging mode"""
        self.__debug=True

    def setDebugOff(self):
        """switches off debugging mode"""
        self.__debug=False

    def setDebug(self,flag=False):
        """sets debug mode to flag"""
        if flag:
            self.setDebugOn()
        else:
            self.setDebugOff()

    def debug(self):
        """returns True if debug mode is on"""
        return self.__debug

#============================================================================================================

class TimeSeriesDataset(TimeSeriesBase):
    """provides an interface for accessing a set of linearly ordered data."""

    def __init__(self,buffer,offset=0,debug=False,description="timeseries.Dataset"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__buffer=buffer
        self.__offset=offset
        if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())

    def __len__(self):
        """needed to handle negative indexing in slicing"""
        return 0

    def getNumComponents(self):
        """returns the number of components of the data (may be overwritten by subclass)"""
        return self.getBuffer().getNumComponents()

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
        return self.getBuffer().getIdOfLastDatum()-self.getOffset()

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (may be overwritten by subclass)"""
        return self.getBuffer().getIdOfFirstDatum()-self.getOffset()

    def getOffsetInBuffer(self):
        """returns the offset to access elements in getBuffer() (may be overwritten by subclass)"""
        return self.getOffset()

    def getIdOfLastUnusedDatum(self):
        """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesDataset (may be overwritten by subclass)"""
        return self.getBuffer().getIdOfLastUnusedDatum()-self.getOffset()

    def updateIdOfLastUnusedDatum(self,last_unused_datum):
        """updates the identification number of the last unused datum (to be overwritten by subclass)"""
        self.getBuffer().updateIdOfLastUnusedDatum(last_unused_datum+self.getOffset())

    def append(self,values):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (to be overwritten by subclass)"""
        self.getBuffer().append(values)

    def getBufferSize(self):
        """returns the size of the buffer (to be overwritten by subclass)"""
        return self.getBuffer().getBufferSize()

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
        return self.getBuffer().needsRearrangement(num_new_data)

    def isEmpty(self):
        """returns True if no data have been appended to the buffer"""
        return self.getNumData()<=0

    def getNumData(self):
        """returns the number of data (not all of them are accessible)"""
        return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1

    def getBuffer(self):
        """returns the buffer referenced by the TimeSeriesDataset"""
        return self.__buffer

    def getOffset(self):
        """returns the offset used when referring to dataset elements"""
        return self.__offset

    def __getitem__(self,index):
        """returns the datum (or slice of data) with identification number index"""
        if type(index)==SliceType:
            start=index.start
            end=index.stop
            if start==end:
                return self[start]
            else:
                if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
                   end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
                return self.getBuffer()[start+self.getOffsetInBuffer():end+self.getOffsetInBuffer()]
        else:
            if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
            return self.getBuffer()[index+self.getOffsetInBuffer()]

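# Example (added for illustration only, not part of the original interface): a
# TimeSeriesDataset view with offset k maps dataset index i onto element i+k of the
# wrapped dataset, e.g.
#
#   bf=TimeSeriesBuffer(buffer_size=5,numComponents=1)
#   view=TimeSeriesDataset(bf,offset=1)
#   bf.append([1.,2.,3.,4.])
#   # view[0] and bf[1] now return the same datum (2.)
#
# (bf and view are example names; compare the dataset tests at the bottom of this file.)
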
class TimeSeriesBuffer(TimeSeriesDataset):
    """An implementation of TimeSeriesDataset which actually stores the data in a numarray buffer"""

    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="timeseries.Buffer"):
        if numComponents<2:
            buffer=numarray.zeros((buffer_size,),type)
        else:
            buffer=numarray.zeros((buffer_size,numComponents),type)
        TimeSeriesDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
        self.__num_data_in_buffer=0
        self.__id_last_unused_datum=id_of_first_datum-1
        self.__id_last_datum=id_of_first_datum-1
        self.__id_first_datum=id_of_first_datum
        if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
                               (self,self.getBufferSize(),self.getNumComponents(),id_of_first_datum)

    def getBufferSize(self):
        """returns the size of the buffer"""
        return self.getBuffer().shape[0]

    def getNumComponents(self):
        """returns the number of components of the data (overwrites TimeSeriesDataset method)"""
        if self.getBuffer().rank==1:
            return 1
        else:
            return self.getBuffer().shape[1]

    def getNumDataInBuffer(self):
        """returns the number of data currently in the buffer"""
        return self.__num_data_in_buffer

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesDataset)"""
        return self.__id_last_datum

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (overwrites method from TimeSeriesDataset)"""
        return self.__id_first_datum

    def getOffsetInBuffer(self):
        """returns the offset to access elements in the buffer (overwrites method from TimeSeriesDataset)"""
        return -self.getIdOfLastDatum()+self.getNumDataInBuffer()-1

    def getIdOfLastUnusedDatum(self):
        """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesDataset (overwrites method from TimeSeriesDataset)"""
        return self.__id_last_unused_datum

    def updateIdOfLastUnusedDatum(self,last_unused_datum):
        """updates the identification number of the last unused datum (overwrites TimeSeriesDataset method)"""
        if self.__id_last_unused_datum>last_unused_datum:
            self.__id_last_unused_datum=last_unused_datum
            if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unused_datum)

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended"""
        return self.getNumDataInBuffer()+num_new_data>self.getBufferSize()

    def append(self,data):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesDataset method)"""
        data=numarray.array(data)
        nc=self.getNumComponents()
        if data.rank==0:
            if nc==1:
                num_new_data=1
            else:
                raise ValueError,"%s: illegal data shape"%self
        elif data.rank==1:
            if nc==1:
                num_new_data=data.shape[0]
            else:
                num_new_data=1
        elif data.rank==2:
            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
            num_new_data=data.shape[0]
        else:
            raise ValueError,"%s: illegal rank"%self

        # check if the buffer would overflow when the data are appended:
        if self.needsRearrangement(num_new_data):
            nn=self.getNumDataInBuffer()
            num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnusedDatum()
            if num_protected_data+num_new_data>self.getBufferSize():
                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
            if num_protected_data>0: self.getBuffer()[0:num_protected_data]=self.getBuffer()[nn-num_protected_data:nn]
            self.__num_data_in_buffer=num_protected_data
            self.__id_last_unused_datum=self.__id_last_datum
            if self.debug():
                print "Debug: %s: rearrangement: first datum in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBuffer()+1)
        # copy data over:
        nn=self.getNumDataInBuffer()
        self.getBuffer()[nn:nn+num_new_data]=data
        self.__num_data_in_buffer+=num_new_data
        self.__id_last_datum+=num_new_data
        self.__id_last_unused_datum+=num_new_data
        if self.debug(): print "Debug: %s: %d data appended. Last unused datum is now %d."%(self,num_new_data,self.__id_last_unused_datum)

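# Note (added for illustration): TimeSeriesBuffer keeps only the most recent data.
# When an append would overflow the buffer, the data lying after the last unused datum
# are copied to the front of the buffer ("rearrangement") and older data are dropped.
# For instance, appending [1.,2.,3.,4.] and then [5.,6.,7.] to a TimeSeriesBuffer of
# size 5 triggers such a rearrangement, after which only the most recent data remain
# stored (compare the dataset tests at the bottom of this file).
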
# ======================================

class TimeSeries(TimeSeriesDataset):
    """A TimeSeries glues a Controler and a TimeSeriesDataset together. It also provides a TimeSeriesDataset view of the dataset"""

    def __init__(self,dataset,debug=False,description="timeseries."):
        TimeSeriesDataset.__init__(self,dataset,0,debug,description)
        self.__id_last_processed_datum=dataset.getIdOfFirstDatum()-1

    def getDataset(self):
        """returns the TimeSeriesDataset of the time series"""
        return self.getBuffer()

    def getControler(self):
        """returns the Controler of the time series (to be overwritten by subclass)"""
        pass

    def getIdOfLastProcessedDatum(self):
        return self.__id_last_processed_datum

    def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
        self.__id_last_processed_datum=id_last_processed_datum

    def __add__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesSum(self,arg)
        else:
            return TimeSeriesAddScalar(self,arg)

    def __sub__(self,arg):
        return self+(-1.)*arg

    def __mul__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesMult(self,arg)
        else:
            return TimeSeriesMultScalar(self,arg)

    def __div__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesDiv(self,arg)
        else:
            return TimeSeriesMultScalar(self,1./arg)

    def __pow__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesPower(self,arg)
        else:
            return TimeSeriesPowerScalar(self,arg)

    def __radd__(self,arg):
        return self.__add__(arg)

    def __rsub__(self,arg):
        return arg+(-1.)*self

    def __rmul__(self,arg):
        return self.__mul__(arg)

    def __rdiv__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesDiv(arg,self)
        else:
            return TimeSeriesDivScalar(self,arg)

    def __rpow__(self,arg):
        if isinstance(arg,TimeSeriesDataset):
            return TimeSeriesPower(arg,self)
        else:
            return Exp(numarray.log(arg)*self)

    def __lshift__(self,arg):
        return TimeSeriesShift(self,-arg)

    def __rshift__(self,arg):
        return TimeSeriesShift(self,arg)

    def __neg__(self):
        return (-1.0)*self

    def __pos__(self):
        return (1.0)*self

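# Example (added for illustration): arithmetic on TimeSeries objects does not act on
# the stored data directly; it builds filter objects that are updated as new time
# nodes arrive, e.g. for a Controler c
#
#   s=(c<<3)+c      # a TimeSeriesSum fed by a shifted view of c and by c itself
#   m=2.*c          # a TimeSeriesMultScalar scaling every datum of c
#
# (c, s and m are example names; compare the operator tests at the bottom of this file.)
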
class TimeSeriesFilter(TimeSeries):
    """A TimeSeriesFilter is a TimeSeriesDataset attached to a Controler where the TimeSeriesDataset provides data
       at the time nodes defined by the Controler. In addition to a TimeSeries, a TimeSeriesFilter allows updating
       the underlying TimeSeriesDataset through the update method, which is overwritten by a particular implementation
       of the class. The update method is called by the attached Controler to append the data [start:end] to the attached dataset."""

    def __init__(self,controler,dataset,args=[],left_wing_size=0,right_wing_size=0,debug=False,description="timeseries.Filter"):
        TimeSeries.__init__(self,dataset,debug,description)
        self.__left_wing_size=left_wing_size
        self.__right_wing_size=right_wing_size
        self.__args=args
        self.__controler=controler
        controler.appendFilterToUpdateList(self)

    def getControler(self):
        """returns the Controler of the time series (overwrites method of TimeSeries)"""
        return self.__controler

    def update(self,start,end):
        """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
        nc=self.getNumComponents()
        if nc>1:
            self.getDataset().append(numarray.zeros([end-start,nc]))
        else:
            self.getDataset().append(numarray.zeros(end-start))

    def getLeftWingSize(self):
        """returns the left wing size"""
        return self.__left_wing_size

    def getRightWingSize(self):
        """returns the right wing size"""
        return self.__right_wing_size

    def getArguments(self,index=None):
        """returns the list of arguments or, if index is present, the argument with index index. In the latter case None is returned if no arguments are present"""
        if index==None:
            return self.__args
        else:
            if len(self.__args)>0:
                return self.__args[index]
            else:
                return None

    def getArgumentDataset(self,index):
        """returns the dataset of the argument with index index"""
        arg=self.getArguments(index)
        if arg==None:
            return None
        else:
            return self.getArguments(index).getDataset()

    def flush(self):
        """calls the update method with the maximum processable range. It also updates the id of the last unused datum for all arguments"""
        start=self.getIdOfLastProcessedDatum()+1
        end=None
        for i in self.getArguments():
            if end==None:
                end=i.getIdOfLastDatum()
            else:
                end=min(end,i.getIdOfLastDatum())
        if not end==None:
            if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
            self.update(start,end-self.getRightWingSize()+1)
            for i in self.getArguments(): i.updateIdOfLastUnusedDatum(end-self.getLeftWingSize())
            self.updateIdOfLastProcessedDatum(end)

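# Note (added for illustration): the wing sizes of a TimeSeriesFilter control how flush()
# clips the processable range: with right_wing_size r the update method is only called for
# data up to datum end-r, and with left_wing_size l the arguments keep their last l data
# protected from being dropped during buffer rearrangement (their last unused datum is
# updated to end-l). All filters defined in this module use wing sizes 0.
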
class Controler(TimeSeries):
    """controls a set of TimeSeries"""

    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="timeseries.Controler"):
        TimeSeries.__init__(self,TimeSeriesBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"Time nodes buffer of "+description),\
                            debug,"Time nodes of "+description)
        self.setFlushRate()
        self.__update_time_series=list()

    def __del__(self):
        self.flush()

    def getControler(self):
        """returns the Controler of the time series (overwrites method of TimeSeries)"""
        return self

    def setFlushRate(self,rate=50):
        """sets the flush rate, i.e. after rate new time nodes have been checked in, the flush method is called."""
        self.__flush_rate=rate
        if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)

    def needsFlushing(self):
        """returns True if the depending TimeSeriesFilters need to be flushed because the time nodes buffer is full or because of the set flush rate"""
        return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0

    def flush(self):
        """flushes all dependent TimeSeriesFilters by calling their flush method"""
        if self.debug(): print "Debug: %s: start flushing"%self
        for time_serie in self.__update_time_series: time_serie.flush()

    def appendFilterToUpdateList(self,time_serie):
        if not time_serie.getControler()==self: raise ValueError,"%s: time series %s is not defined on %s."%(self,time_serie,self)
        if not self.isEmpty(): raise ValueError,"%s: a time series can only be added while the controler is empty."%self
        self.__update_time_series.append(time_serie)
        if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)

    def newTimeNode(self,value):
        if self.needsFlushing(): self.flush()
        self.getDataset().append(value)
        if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)

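# Example (added for illustration): typical use of a Controler as the driver of a
# calculation, following the tests at the bottom of this file:
#
#   c=Controler(buffer_size=15)
#   s=(c<<1)+c                    # derived series are set up while c is still empty
#   for i in range(30): c.newTimeNode(i*1.)
#   c.flush()                     # process any remaining data
#
# (c and s are example names; derived series have to be created before the first time
# node is added, see appendFilterToUpdateList.)
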
# ============================================ |

class TimeSeriesShift(TimeSeries):
    """creates a shift of the time series, i.e. if d[n] is the datum at time node t[n], d[n] appears at position n+shift of the output"""

    def __init__(self,time_serie,shift=1):
        if shift<0:
            dsc="(%s)<<%d"%(time_serie,-shift)
        else:
            dsc="(%s)>>%d"%(time_serie,shift)
        self.__controler=time_serie.getControler()
        TimeSeries.__init__(self,TimeSeriesDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),\
                            time_serie.debug(),dsc)

    def getControler(self):
        return self.__controler

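# Example (added for illustration): for a Controler c, c>>3 delays the series by three
# time nodes and c<<3 advances it, i.e. (c>>3).getDataset()[n+3] and
# (c<<3).getDataset()[n-3] both return the datum stored at time node n (compare the
# Controler tests at the bottom of this file).
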
class TimeSeriesSum(TimeSeriesFilter):
    """adds two TimeSeries"""

    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesSum: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])

class TimeSeriesAddScalar(TimeSeriesFilter):
    """adds a single value to a TimeSeries"""

    def __init__(self,time_serie,scalar):
        dsc="(%s)+(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)

class TimeSeriesMult(TimeSeriesFilter):
    """multiplies two TimeSeries"""

    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])

class TimeSeriesMultScalar(TimeSeriesFilter):
    """multiplies a TimeSeries with a single value"""

    def __init__(self,time_serie,scalar):
        dsc="(%s)*%s"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)

class TimeSeriesDiv(TimeSeriesFilter):
    """divides two TimeSeries"""

    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])

class TimeSeriesDivScalar(TimeSeriesFilter):
    """divides a scalar by a TimeSeries"""

    def __init__(self,time_serie,scalar):
        dsc="(%s)/(%s)"%(scalar,time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.__scalar/self.getArgumentDataset(0)[start:end])

class TimeSeriesPower(TimeSeriesFilter):
    """raises one TimeSeries to the power of another TimeSeries"""

    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])

class TimeSeriesPowerScalar(TimeSeriesFilter):
    """raises a TimeSeries to the power of a scalar"""

    def __init__(self,time_serie,scalar):
        dsc="(%s)**(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)

class Exp(TimeSeriesFilter):
    """applies the exponential function to a TimeSeries"""

    def __init__(self,time_serie):
        dsc="exp(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))

class TimeSeriesCumulativeSum(TimeSeriesFilter):
    """builds the cumulative sum of a time series (used by Integral)"""

    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,1)
        TimeSeries.__init__(self,frame_size=time_series.getDatasetSize(),buffer_size=time_series.getBufferSize(), \
                            numComponents=time_series.getNumComponents())
        self.setDebug(time_series.debug())
        time_series.checkInUpdate(self)
        self.__integral=0

    def __str__(self):
        return "timeseries.Integrator"

    def update(self,times,data):
        l=times.shape[0]
        self.append(times[1:l],(data[0:l-1]+data[1:l])/2.*(times[1:l]-times[0:l-1]))

class TimeSeriesCollector(TimeSeries):
    """timeseries.Collector collects data at time nodes"""

    def __init__(self):
        TimeSeries.__init__(self)

    def __str__(self):
        return "timeseries.Collector"

    def add(self,time_mark,value):
        """adds the value at time time_mark to the time series"""
        self.append(numarray.array([time_mark]),numarray.array([value]))

    def read(self,istream,seperator=","):
        """reads time/value pairs, separated by seperator, from the input stream istream"""
        for l in istream:
            d=l.strip().split(seperator)
            self.add(float(d[0]),float(d[1]))

def Differential(time_series):
    """calculates the derivative Dv of the time series v:

          Dv[n]=(v[n]-v[n-1])/(t[n]-t[n-1])

    """
    return ((time_series<<1)-time_series)/((time_series.getControler()<<1)-time_series.getControler())

def Integral(time_series):
    """calculates the integral Iv of the time series v using the trapezoidal rule:

          Iv[n]=sum_{i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])

    """
    return TimeSeriesCumulativeSum(((time_series<<1)+time_series)/2.*(time_series.getControler()-(time_series.getControler()<<1)),0.)

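# Worked example (added for illustration) for the trapezoidal rule used by Integral:
# for time nodes t=[0.,1.,2.] with values v=[0.,2.,4.] the increments are
# (0.+2.)/2*(1.-0.)=1. and (2.+4.)/2*(2.-1.)=3., so the cumulative integral after the
# last node is 1.+3.=4.
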
class TimeSeriesViewer(TimeSeriesFilter):
    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,0)
        time_series.checkInUpdate(self)

    def __str__(self):
        return "timeseries.Viewer"

    def update(self,times,data):
        for i in range(times.shape[0]): print "[%s: %s]"%(times[i],data[i])

class TimeSeriesWriter(TimeSeriesFilter):
    def __init__(self,time_series,ostream,seperator=","):
        TimeSeriesFilter.__init__(self,0)
        time_series.checkInUpdate(self)
        self.setDebug(time_series.debug())
        self.__ostream=ostream
        self.__seperator=seperator

    def __str__(self):
        return "timeseries.Writer"

    def update(self,times,data):
        for i in range(times.shape[0]): self.__ostream.writelines("%s%s%s\n"%(times[i],self.__seperator,data[i]))

# test

if __name__=="__main__":
    # tests the interfaces to data sets:
    print "Test of Datasets:"
    print "================="
    bf=TimeSeriesBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBuffer")
    bfv_l=TimeSeriesDataset(bf,offset=1,debug=True,description="offset 1")
    bfv_r=TimeSeriesDataset(bf,offset=-1,debug=True,description="offset -1")
    bf.append([1.,2.,3.,4.])
    print "should be all 2. :",bfv_l[0]
    print bf[1]
    print bfv_r[2]
    bf.append([5.,6.,7.])
    print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
    print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
    print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
    print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]

    print "Test of Controler"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    s3=b>>3
    s1=b>>1
    s_3=b<<3
    sum=(s_3+b)+(b+s3)

    for i in range(30):
        b.newTimeNode(i*1.)
    b.flush()
    print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
    print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
    print "should be all 96. :",sum.getDataset()[24]

    print "Test of operators"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    b1=b<<1
    a=b+b1
    a_s=b1+1.
    s_a=1.+b1
    d=b-b1
    d_s=b1-1.
    s_d=1.-b1
    m=b*b1
    m_s=b1*2.
    s_m=2.*b1
    dv=b/b1
    dv_s=b1/2.
    s_dv=2./b1
    p=b**b1
    p_s=b1**2.
    s_p=2.**b1
    pb=+b
    mb=-b
    for i in range(30):
        b.newTimeNode(i*1.)
    b.flush()
    print "a[28] should be %e: %e"%(28.+29.,a[28])
    print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
    print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
    print "d[28] should be %e: %e"%(28.-29.,d[28])
    print "d_s[28] should be %e: %e"%(29.-1.,d_s[28])
    print "s_d[28] should be %e: %e"%(1.-29.,s_d[28])
    print "m[28] should be %e: %e"%(28.*29.,m[28])
    print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
    print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
    print "dv[28] should be %e: %e"%(28./29.,dv[28])
    print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
    print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
    print "p[28] should be %e: %e"%(28.**29.,p[28])
    print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
    print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
    print "pb[28] should be %e: %e"%(28.,pb[28])
    print "mb[28] should be %e: %e"%(-28.,mb[28])

    1/0
    c=TimeSeriesCollector(b)
    c.setDebugOn()
    ii=TimeSeriesIntegrator(c)
    d=TimeSeriesDifferential(c)
    v=TimeSeriesViewer(ii)
    w=TimeSeriesWriter(d,file("test.csv","w"))

    for i in range(15):
        b.newTime(i*1.)
        c.add(i+1.)