
########################################################
#
# Copyright (c) 2003-2008 by University of Queensland
# Earth Systems Science Computational Center (ESSCC)
# http://www.uq.edu.au/esscc
#
# Primary Business: Queensland, Australia
# Licensed under the Open Software License version 3.0
# http://www.opensource.org/licenses/osl-3.0.php
#
########################################################

__copyright__="""Copyright (c) 2003-2008 by University of Queensland
Earth Systems Science Computational Center (ESSCC)
http://www.uq.edu.au/esscc
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="http://www.uq.edu.au/esscc/escript-finley"

"""
Time series analysis

@var __author__: name of author
@var __copyright__: copyrights
@var __license__: licence agreement
@var __url__: url entry point on documentation
@var __version__: version
@var __date__: date of the version
"""


__author__="Lutz Gross, l.gross@uq.edu.au"


import numarray
from types import SliceType
DEFAULT_BUFFER_SIZE=1000
DEFAULT_FLOAT_TYPE=numarray.Float64
class TimeSeriesBase:
    """The TimeSeriesBase class is the base class for all classes of the TimeSeries module."""

    def __init__(self,debug=False,description="TimeSeriesBase"):
        self.__debug=debug
        self.setDescription(description)

    def __str__(self):
        return self.__description

    def setDescription(self,text):
        self.__description=text

    def setDebugOn(self):
        """switches on debugging mode"""
        self.__debug=True

    def setDebugOff(self):
        """switches off debugging mode"""
        self.__debug=False

    def setDebug(self,flag=False):
        """sets debug mode to flag"""
        if flag:
            self.setDebugOn()
        else:
            self.setDebugOff()

    def debug(self):
        """returns True if debug mode is on"""
        return self.__debug

#============================================================================================================
class TimeSeriesBaseDataset(TimeSeriesBase):
    """provides an interface for accessing a set of linearly ordered data."""
    def __init__(self,buffer,offset=0,debug=False,description="TimeSeriesDataset"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__buffer=buffer
        self.__offset=offset
        if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())

    def __len__(self):
        """needed to handle negative indexing in slicing"""
        return 0

    def getNumComponents(self):
        """returns the number of components of the data (may be overwritten by subclass)"""
        return self.getBaseBuffer().getNumComponents()

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastDatum()-self.getOffset()

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstDatum()-self.getOffset()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfFirstAvailableDatum()-self.getOffset()

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in getBaseBuffer() (may be overwritten by subclass)"""
        return self.getOffset()

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (may be overwritten by subclass)"""
        return self.getBaseBuffer().getIdOfLastUnreferencedDatum()-self.getOffset()

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (to be overwritten by subclass)"""
        self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum+self.getOffset())

    def append(self,values):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (to be overwritten by subclass)"""
        self.getBaseBuffer().append(values)

    def getBaseBufferSize(self):
        """returns the size of the buffer (to be overwritten by subclass)"""
        return self.getBaseBuffer().getBaseBufferSize()

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
        return self.getBaseBuffer().needsRearrangement(num_new_data)

    def isEmpty(self):
        """returns True if no data have been appended to the buffer"""
        return self.getNumData()<=0

    def getNumData(self):
        """returns the number of data (not all of them are accessible)"""
        return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1

    def getBaseBuffer(self):
        """returns the buffer referenced by the TimeSeriesBaseDataset"""
        return self.__buffer

    def getOffset(self):
        """returns the offset when referring to dataset elements"""
        return self.__offset

    def __getitem__(self,index):
        """returns the datum index"""
        if type(index)==SliceType:
            start=index.start
            end=index.stop
            if start==end:
                return self[start]
            else:
                if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
                   end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
                return self.getBaseBuffer()[start+self.getOffsetInBaseBuffer():end+self.getOffsetInBaseBuffer()]
        else:
            if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
            return self.getBaseBuffer()[index+self.getOffsetInBaseBuffer()]

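# Example (illustrative sketch, not part of the original module): a
# TimeSeriesBaseDataset can also act as an offset view onto another dataset,
# so the same underlying values are addressed under shifted identification
# numbers.  It uses the TimeSeriesBaseBuffer implementation defined below;
# the sizes and values are arbitrary.
#
#   base=TimeSeriesBaseBuffer(buffer_size=10,numComponents=1)
#   base.append([10.,20.,30.])
#   view=TimeSeriesBaseDataset(base,offset=1)   # view[k] reads base[k+1]
#   print base[1], view[0]                      # both print 20.0
#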
class TimeSeriesBaseBuffer(TimeSeriesBaseDataset):
    """An implementation of TimeSeriesBaseDataset which actually stores data in a numarray buffer"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="TimeSeriesBaseBuffer"):
        if numComponents<2:
            buffer=numarray.zeros((buffer_size,),type)
        else:
            buffer=numarray.zeros((buffer_size,numComponents),type)
        TimeSeriesBaseDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
        self.__num_data_in_buffer=0
        self.__id_last_unreferenced_datum=id_of_first_datum-1
        self.__id_last_datum=id_of_first_datum-1
        self.__id_first_datum=id_of_first_datum
        if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
                               (self,self.getBaseBufferSize(),self.getNumComponents(),id_of_first_datum)

    def getBaseBufferSize(self):
        """returns the size of the buffer"""
        return self.getBaseBuffer().shape[0]

    def getNumComponents(self):
        """returns the number of components of the data (overwrites TimeSeriesBaseDataset method)"""
        if self.getBaseBuffer().rank==1:
            return 1
        else:
            return self.getBaseBuffer().shape[1]

    def getNumDataInBaseBuffer(self):
        """returns the number of data currently in the buffer"""
        return self.__num_data_in_buffer

    def getIdOfLastDatum(self):
        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_datum

    def getIdOfFirstDatum(self):
        """returns the identification number of the first datum (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_first_datum

    def getOffsetInBaseBuffer(self):
        """returns the offset to access elements in the buffer (overwrites method from TimeSeriesBaseDataset)"""
        return -self.getIdOfLastDatum()+self.getNumDataInBaseBuffer()-1

    def getIdOfLastUnreferencedDatum(self):
        """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (overwrites method from TimeSeriesBaseDataset)"""
        return self.__id_last_unreferenced_datum

    def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
        """updates the identification number of the last unused datum (overwrites TimeSeriesBaseDataset method)"""
        if self.__id_last_unreferenced_datum>last_unreferenced_datum:
            self.__id_last_unreferenced_datum=last_unreferenced_datum
            if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unreferenced_datum)

    def needsRearrangement(self,num_new_data=0):
        """returns True if the buffer will be full after num_new_data have been appended"""
        return self.getNumDataInBaseBuffer()+num_new_data>self.getBaseBufferSize()

    def getIdOfFirstAvailableDatum(self):
        """returns the identification number of the first available datum (overwrites TimeSeriesBaseDataset method)"""
        return self.getIdOfLastDatum()-self.__num_data_in_buffer+1

    def append(self,data):
        """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesBaseDataset method)"""
        data=numarray.array(data)
        nc=self.getNumComponents()
        if data.rank==0:
            if nc==1:
                num_new_data=1
            else:
                raise ValueError,"%s: illegal data shape"%self
        elif data.rank==1:
            if nc==1:
                num_new_data=data.shape[0]
            else:
                num_new_data=1
        elif data.rank==2:
            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
            num_new_data=data.shape[0]
        else:
            raise ValueError,"%s: illegal rank"%self

        # check if the buffer would overflow when the data are appended:
        if self.needsRearrangement(num_new_data):
            nn=self.getNumDataInBaseBuffer()
            num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnreferencedDatum()
            if num_protected_data+num_new_data>self.getBaseBufferSize():
                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
            if num_protected_data>0: self.getBaseBuffer()[0:num_protected_data]=self.getBaseBuffer()[nn-num_protected_data:nn]
            self.__num_data_in_buffer=num_protected_data
            self.__id_last_unreferenced_datum=self.__id_last_datum
            if self.debug():
                print "Debug: %s: rearrangement: first data in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBaseBuffer()+1)
        # copy data over:
        nn=self.getNumDataInBaseBuffer()
        self.getBaseBuffer()[nn:nn+num_new_data]=data
        self.__num_data_in_buffer+=num_new_data
        self.__id_last_datum+=num_new_data
        self.__id_last_unreferenced_datum+=num_new_data
        if self.debug(): print "Debug: %s: %d data appended. Last unreferenced datum is now %d."%(self,num_new_data,self.__id_last_unreferenced_datum)

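# Example (illustrative sketch, not part of the original module): the buffer
# holds at most buffer_size data; when an append would overflow it, the data
# still referenced by dependent TimeSeries are copied to the front
# ("rearrangement") and older, unreferenced data are dropped.  The sizes below
# are arbitrary.
#
#   buf=TimeSeriesBaseBuffer(buffer_size=4,numComponents=1,debug=True)
#   buf.append([1.,2.,3.])            # 3 of 4 slots used
#   print buf.needsRearrangement(2)   # True, since 3+2 > 4
#   buf.append([4.,5.])               # triggers a rearrangement before appending
#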
# ======================================
class TimeSeriesControlerView(TimeSeriesBase):
    """A TimeSeriesControlerView is attached to a Controler and moves forward in time by increasing the id of the last processed datum.

    Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
    def __init__(self,id_first_datum=0,debug=False,description="TimeSeries"):
        TimeSeriesBase.__init__(self,debug,description)
        self.__id_last_processed_datum=id_first_datum-1
        if self.debug(): print "Debug: %s created with first datum %d"%(str(self),id_first_datum)

    def getIdOfLastProcessedDatum(self):
        return self.__id_last_processed_datum

    def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
        self.__id_last_processed_datum=id_last_processed_datum

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

class TimeSeries(TimeSeriesBaseDataset,TimeSeriesControlerView):
    """makes TimeSeriesBaseDataset look like a TimeSeries and introduces operations

    Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
    def __init__(self,dataset,debug=False,description="TimeSeries"):
        TimeSeriesControlerView.__init__(self,dataset.getIdOfFirstDatum(),debug,description)
        TimeSeriesBaseDataset.__init__(self,dataset,0,debug,description)

    def getDataset(self):
        """returns the TimeSeriesBaseDataset of the time series"""
        return self.getBaseBuffer()

    # def getControler(self):
    #    """returns the Controler of the time series (to be overwritten by subclass)"""
    #    pass

    def __add__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesAdd(self,arg)
        else:
            return TimeSeriesAddScalar(self,arg)

    def __sub__(self,arg):
        return self+(-1.)*arg

    def __mul__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesMult(self,arg)
        else:
            return TimeSeriesMultScalar(self,arg)

    def __div__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(self,arg)
        else:
            return TimeSeriesMultScalar(self,1./arg)

    def __pow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(self,arg)
        else:
            return TimeSeriesPowerScalar(self,arg)

    def __radd__(self,arg):
        return self.__add__(arg)

    def __rsub__(self,arg):
        return arg+(-1.)*self

    def __rmul__(self,arg):
        return self.__mul__(arg)

    def __rdiv__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesDiv(arg,self)
        else:
            return TimeSeriesDivScalar(self,arg)

    def __rpow__(self,arg):
        if isinstance(arg,TimeSeriesBaseDataset):
            return TimeSeriesPower(arg,self)
        else:
            return Exp(numarray.log(arg)*self)

    def __lshift__(self,arg):
        return TimeSeriesShift(self,-arg)

    def __rshift__(self,arg):
        return TimeSeriesShift(self,arg)

    def __neg__(self):
        return (-1.0)*self

    def __pos__(self):
        return (1.0)*self

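# Example (illustrative sketch, not part of the original module): arithmetic on
# TimeSeries objects does not compute values immediately; it builds
# TimeSeriesFilter objects (defined below) that are brought up to date whenever
# the controler flushes.  The names and step size are placeholders.
#
#   cn=Controler(buffer_size=100)
#   prev=cn<<1                    # the value at the previous time node
#   midpoint=(cn+prev)/2.         # a TimeSeriesAdd followed by a TimeSeriesMultScalar
#   for k in range(40): cn.nextTime(k*0.1)
#   cn.flush()                    # midpoint now holds the averaged time nodes
#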
class TimeSeriesOperator(TimeSeriesControlerView):
    """a TimeSeriesOperator describes an operation acting on a list of TimeSeries time_series_args. It allows updating its output (if there is any)
    through the update method, which is overwritten by a particular implementation of the class. The update method is called to process the data [start:end] using
    [start-left_wing_size:end+right_wing_size] of its arguments."""
    def __init__(self,controler,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesOperator"):
        id_first_datum=controler.getIdOfFirstDatum()
        for i in time_series_args: id_first_datum=max(id_first_datum,i.getIdOfFirstDatum())
        TimeSeriesControlerView.__init__(self,id_first_datum+left_wing_size,debug,description)
        self.__left_wing_size=left_wing_size
        self.__right_wing_size=right_wing_size
        self.__time_series_args=time_series_args
        self.__controler=controler
        controler.appendOperatorToUpdateList(self)
        if self.debug(): print "Debug: %s: with left/right wing size %d/%d and %d arguments."%(str(self),left_wing_size,right_wing_size,len(time_series_args))

    def __del__(self):
        self.getControler().removeOperatorFromUpdateList(self)

    def getControler(self):
        """returns the Controler updating the TimeSeriesOperator"""
        return self.__controler

    def getLeftWingSize(self):
        """returns the left wing size"""
        return self.__left_wing_size

    def getRightWingSize(self):
        """returns the right wing size"""
        return self.__right_wing_size

    def getArguments(self,index=None):
        """returns the list of arguments or, if index is present, the argument with index index. In the latter case None is returned if no arguments are present"""
        if index==None:
            return self.__time_series_args
        else:
            if len(self.__time_series_args)>0:
                return self.__time_series_args[index]
            else:
                return None

    def getArgumentDataset(self,index):
        """returns the dataset of the argument with index index"""
        arg=self.getArguments(index)
        if arg==None:
            return None
        else:
            return self.getArguments(index).getDataset()

    def flush(self):
        """calls the update method with the maximum processable range. It also updates the id of the last unused datum for all arguments"""
        start=self.getIdOfLastProcessedDatum()+1
        end=self.getControler().getIdOfLastDatum()
        for i in self.getArguments(): end=min(end,i.getIdOfLastDatum())
        if start<=end-self.getRightWingSize():
            if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
            self.update(start,end-self.getRightWingSize()+1)
            for i in self.getArguments(): i.updateIdOfLastUnreferencedDatum(end-self.getLeftWingSize())
            self.updateIdOfLastProcessedDatum(end)

    def update(self,start,end):
        """updates the data [start:end] using [start-left_wing_size:end+right_wing_size] of its arguments (is overwritten by a particular TimeSeriesOperator)"""
        pass


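# Example (illustrative sketch, not part of the original module): a custom
# operator only needs to register its arguments through
# TimeSeriesOperator.__init__ and implement update(start,end).  This
# hypothetical PrintMax prints the maximum of each newly processed range of its
# single argument; it has to be created while the controler is still empty.
#
#   class PrintMax(TimeSeriesOperator):
#       def __init__(self,time_serie):
#           TimeSeriesOperator.__init__(self,time_serie.getControler(),[time_serie],0,0,
#                                       time_serie.debug(),"printmax(%s)"%time_serie)
#       def update(self,start,end):
#           print "max of %s in [%d:%d] is %s"%(self.getArguments(0),start,end,
#                                               max(self.getArguments(0)[start:end]))
#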
class TimeSeriesFilter(TimeSeries,TimeSeriesOperator):
    """a TimeSeriesFilter is a TimeSeries that is created through a TimeSeriesOperator"""
    def __init__(self,controler,dataset,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesFilter"):
        TimeSeriesOperator.__init__(self,controler,time_series_args,left_wing_size,right_wing_size,debug,description)
        TimeSeries.__init__(self,dataset,debug,description)

    def update(self,start,end):
        """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
        nc=self.getNumComponents()
        if nc>1:
            self.getDataset().append(numarray.zeros([end-start,nc]))
        else:
            self.getDataset().append(numarray.zeros(end-start))

class Controler(TimeSeries):
    """controls a set of TimeSeries"""
    def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="TimeSeriesControler"):
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"node buffer of "+description),debug,"nodes of "+description)
        self.setFlushRate()
        self.__update_time_series=list()

    def getControler(self):
        """returns the Controler of the time series (overwrites method of TimeSeries)"""
        return self

    def setFlushRate(self,rate=50):
        """sets the flush rate, i.e. after rate new time nodes have been checked in, the flush method is called."""
        self.__flush_rate=rate
        if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)

    def needsFlushing(self):
        """returns True if the dependent TimeSeriesFilters need to be flushed because the time node buffer is full or because of the set flush rate"""
        return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0

    def flush(self):
        """flushes all dependent TimeSeriesFilters by calling their flush method"""
        if self.debug(): print "Debug: %s: start flushing"%self
        for time_serie in self.__update_time_series: time_serie.flush()

    def appendOperatorToUpdateList(self,time_serie):
        if not time_serie.getControler()==self: raise ValueError,"%s: TimeSeries %s is not defined on this controler."%(self,time_serie)
        if not self.isEmpty(): raise ValueError,"%s: you can only check in a time series while the controler is empty."%self
        self.__update_time_series.append(time_serie)
        if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)

    def removeOperatorFromUpdateList(self,time_serie):
        self.__update_time_series.remove(time_serie)
        if self.debug(): print "Debug: %s: %s has been removed from update list."%(self,time_serie)

    def nextTime(self,value):
        if self.needsFlushing(): self.flush()
        self.getDataset().append(value)
        if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)

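# Example (illustrative sketch, not part of the original module): the Controler
# holds the time nodes; derived series register themselves when they are
# created and are brought up to date on every flush.  The buffer size and time
# step are arbitrary.
#
#   cn=Controler(buffer_size=50)
#   squared=cn*cn                 # a TimeSeriesMult filter on the time nodes
#   for k in range(20):
#       cn.nextTime(k*0.5)        # flushes automatically at the flush rate
#   cn.flush()                    # process whatever is still pending
#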
class TimeSeriesShift(TimeSeries):
    """creates a shift of the time series, i.e. if v[n] is the datum at time t[n], the value at t[n] becomes v[n+shift] on the output"""
    def __init__(self,time_serie,shift=1):
        if shift<0:
            dsc="(%s)<<%d"%(time_serie,-shift)
        else:
            dsc="(%s)>>%d"%(time_serie,shift)
        self.__controler=time_serie.getControler()
        TimeSeries.__init__(self,TimeSeriesBaseDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),time_serie.debug(),dsc)

    def getControler(self):
        return self.__controler

class TimeSeriesAdd(TimeSeriesFilter):
    """adds two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesAdd: %s and %s have different controlers."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])

class TimeSeriesAddScalar(TimeSeriesFilter):
    """adds a single value to a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)+(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)

class TimeSeriesMult(TimeSeriesFilter):
    """multiplies two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesMult: %s and %s have different controlers."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])

class TimeSeriesMultScalar(TimeSeriesFilter):
    """multiplies a TimeSeries with a single value"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)*%s"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)

class TimeSeriesDiv(TimeSeriesFilter):
    """divides two TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesDiv: %s and %s have different controlers."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])

class TimeSeriesDivScalar(TimeSeriesFilter):
    """divides a scalar by a TimeSeries"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)/(%s)"%(scalar,time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.__scalar/self.getArgumentDataset(0)[start:end])

class TimeSeriesPower(TimeSeriesFilter):
    """raises one TimeSeries to the power of another TimeSeries"""
    def __init__(self,time_serie_1,time_serie_2):
        dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
        dbg=time_serie_1.debug() or time_serie_2.debug()
        cntrl=time_serie_1.getControler()
        if not cntrl==time_serie_2.getControler():
            raise ValueError("TimeSeriesPower: %s and %s have different controlers."%(time_serie_1,time_serie_2))
        id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])

class TimeSeriesPowerScalar(TimeSeriesFilter):
    """raises a TimeSeries to the power of a scalar"""
    def __init__(self,time_serie,scalar):
        dsc="(%s)**(%s)"%(time_serie,scalar)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__scalar=scalar

    def update(self,start,end):
        self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)

class Exp(TimeSeriesFilter):
    """applies the exponential function to a TimeSeries"""
    def __init__(self,time_serie):
        dsc="exp(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)

    def update(self,start,end):
        self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))

class Writer(TimeSeriesOperator):
    """writes the time series into an output stream ostream which must have the writelines method. The values are separated by the string seperator."""
    def __init__(self,time_serie,ostream,seperator=",",commend_tag="#"):
        dsc="write %s to %s"%(time_serie,ostream)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        self.__ostream=ostream
        self.__seperator=seperator
        TimeSeriesOperator.__init__(self,cntrl,[time_serie],0,0,dbg,dsc)
        ostream.writelines("%s time series %s\n"%(commend_tag,str(self)))

    def update(self,start,end):
        cntrl=self.getControler()
        arg=self.getArguments(0)
        n=arg.getNumComponents()
        if n<2:
            for i in range(start,end): self.__ostream.writelines("%s%s%s\n"%(cntrl[i],self.__seperator,arg[i]))
        else:
            for i in range(start,end):
                l="%s"%cntrl[i]
                for j in range(n): l=l+"%s%s"%(self.__seperator,arg[i][j])
                self.__ostream.writelines("%s\n"%l)

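# Example (illustrative sketch, not part of the original module): a Writer can
# dump a series to any object with a writelines method; viewer() further below
# wraps sys.stdout the same way.  The file name is arbitrary.
#
#   cn=Controler(buffer_size=100)
#   q=DataCatcher(cn)             # DataCatcher is defined below
#   out=file("/tmp/series.csv","w")
#   w=Writer(q,out)               # one line per time node: t,value
#   cn.nextTime(0.); q.nextValue(1.)
#   cn.nextTime(1.); q.nextValue(2.)
#   cn.flush(); out.close()
#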
class DataCatcher(TimeSeries):
    """collects data into a time series."""
    def __init__(self,controler,numComponents=1,description="DataCatcher"):
        self.__controler=controler
        dbg=controler.debug()
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(controler.getBaseBufferSize(),numComponents,DEFAULT_FLOAT_TYPE,controler.getIdOfFirstDatum(),dbg,"buffer for "+description),dbg,description)

    def getControler(self):
        return self.__controler

    def nextValue(self,value):
        """appends a value to the time series"""
        id_last=self.getIdOfLastDatum()
        id_current=self.getControler().getIdOfLastDatum()
        if id_last+1==id_current:
            self.getDataset().append(value)
        elif id_last+1<id_current:
            if self.isEmpty():
                self.getDataset().append(value)
                id_last+=1
            t_last=self.getControler()[id_last]
            t_current=self.getControler()[id_current]
            value_last=self[id_last]
            out=(value_last-value)/(t_last-t_current)*(self.getControler()[id_last+1:id_current+1]-t_current)+value
            self.getDataset().append(out)
        else:
            raise ValueError,"%s: a new time node must be introduced before a new value can be added."%self
        self.updateIdOfLastUnreferencedDatum(id_last)

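# Example (illustrative sketch, not part of the original module): a DataCatcher
# records externally supplied values against the controler's time nodes and
# linearly interpolates any nodes that were skipped between two values.
#
#   cn=Controler(buffer_size=100)
#   q=DataCatcher(cn)
#   cn.nextTime(0.); q.nextValue(10.)
#   cn.nextTime(1.)                     # no value supplied at t=1
#   cn.nextTime(2.); q.nextValue(30.)   # q at t=1 is interpolated to 20.
#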
class TimeSeriesCumulativeSum(TimeSeriesFilter):
    """cumulative sum of the time series values"""
    def __init__(self,time_serie):
        dsc="cumsum(%s)"%(time_serie)
        dbg=time_serie.debug()
        cntrl=time_serie.getControler()
        id_first_datum=time_serie.getIdOfFirstDatum()
        TimeSeriesFilter.__init__(self,cntrl, \
                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
                                  [time_serie],0,0,dbg,dsc)
        self.__last_value=0

    def update(self,start,end):
        out=numarray.cumsum(self.getArgumentDataset(0)[start:end])+self.__last_value
        self.__last_value=out[end-start-1]
        self.append(out)


class Reader(TimeSeriesBase):
    """reads a list of input streams and creates a time series for each input stream on the same Controler, where the first column
    is used to create the time nodes"""
    def __init__(self,list_of_istreams,buffer_size=DEFAULT_BUFFER_SIZE,seperator=",",commend_tag="#",debug=False):
        TimeSeriesBase.__init__(self,debug=debug,description="reader")
        if not isinstance(list_of_istreams,list):
            self.__list_of_istreams=[list_of_istreams]
        else:
            self.__list_of_istreams=list_of_istreams
        self.__cntrl=Controler(buffer_size,debug,"reader controler")
        self.__seperator=seperator
        self.__commend_tag=commend_tag
        self.__time_series={}
        self.__t={}
        self.__v={}
        # set up the time series:
        for i in self.__list_of_istreams:
            line=self.__commend_tag
            while not line=="" and line[0]==self.__commend_tag:
                line=i.readline().strip()
            if line=="":
                self.__list_of_istreams.remove(i)
            else:
                d=line.split(self.__seperator)
                self.__t[i]=float(d[0])
                tmp=[]
                for j in d[1:]: tmp.append(float(j))
                self.__v[i]=numarray.array(tmp)
                self.__time_series[i]=DataCatcher(self.__cntrl,len(d)-1,str(i))

    #
    def run(self):
        while len(self.__list_of_istreams)>0:
            if len(self.__time_series)>0:
                # find all streams holding the minimum time node:
                tminargs=[]
                for i in self.__time_series:
                    if len(tminargs)==0:
                        tminargs.append(i)
                    elif abs(self.__t[tminargs[0]]-self.__t[i])<1.e-8*abs(self.__t[i]):
                        tminargs.append(i)
                    elif self.__t[i]<self.__t[tminargs[0]]:
                        tminargs=[i]
                # check in the minimum time node and the corresponding values:
                self.__cntrl.nextTime(self.__t[tminargs[0]])
                for i in tminargs:
                    self.__time_series[i].nextValue(self.__v[i])
                    # find next line without leading "#"
                    line="#"
                    while not line=="" and line[0]==self.__commend_tag:
                        line=i.readline().strip()
                    # if eof is reached the istream is removed from the search
                    if line=="":
                        self.__list_of_istreams.remove(i)
                    else:
                        d=line.split(self.__seperator)
                        self.__t[i]=float(d[0])
                        tmp=[]
                        for j in d[1:]: tmp.append(float(j))
                        self.__v[i]=numarray.array(tmp)

    def getControler(self):
        """returns the controler shared by all time series created through the input streams"""
        return self.__cntrl

    def getTimeSeries(self,istream=None):
        """returns the time series as a tuple. If istream is present its time series is returned"""
        if istream==None:
            out=self.__time_series.values()
            if len(out)>1:
                return tuple(out)
            elif len(out)>0:
                return out[0]
            else:
                return None
        else:
            return self.__time_series[istream]

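# Example (illustrative sketch, not part of the original module): reading a CSV
# file whose first column holds the time nodes; this mirrors the test at the
# end of the module.  The file name is arbitrary.
#
#   rin=Reader(file("/tmp/test.csv","r"),buffer_size=100)
#   rin.run()                     # pulls all rows through the shared controler
#   ts=rin.getTimeSeries()        # one DataCatcher per input stream
#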
class Plotter(TimeSeriesOperator):
    def __init__(self,time_series,window_size=DEFAULT_BUFFER_SIZE/4,file_name=None,format=None):
        if isinstance(time_series,list):
            dbg=time_series[0].getControler().debug()
            text=""
            for i in time_series:
                if len(text)==0:
                    text=str(i)
                else:
                    text=text+","+str(i)
            TimeSeriesOperator.__init__(self,time_series[0].getControler(),time_series,window_size,0,dbg,"plot(%s)"%text)
        else:
            dbg=time_series.getControler().debug()
            text=str(time_series)
            TimeSeriesOperator.__init__(self,time_series.getControler(),[time_series],window_size,0,dbg,"plot(%s)"%text)
        from pyvisi.renderers.gnuplot import LinePlot,Scene,PsImage
        self.__renderer=Scene()
        self.__line_plot=LinePlot(self.__renderer)
        self.__line_plot.setTitle(text)
        self.__line_plot.setLineStyle("lines")
        self.__line_plot.setXLabel("time")
        self.__line_plot.setYLabel("values")
        self.__file_name=file_name
        if format==None:
            self.__format=PsImage()
        else:
            self.__format=format
        self.__window_size=window_size

    def update(self,start,end):
        s=max(end-self.__window_size,self.getControler().getIdOfFirstAvailableDatum())
        args=[self.getControler()[s:end]]
        for arg in self.getArguments(): args.append(arg[s:end])
        self.__line_plot.setData(*args)
        self.__line_plot.render()
        if self.__file_name==None:
            raise SystemError,"Online viewing is not available yet!"
        else:
            self.__renderer.save(fname=self.__file_name, format=self.__format)

def viewer(time_serie,seperator=","):
    """creates a viewer for a time series"""
    import sys
    return Writer(time_serie,sys.stdout,seperator)

def differential(time_serie):
    """calculates the derivative Dv of the time series v:

    Dv[n]=(v[n]-v[n-1])/(t[n]-t[n-1])

    """
    out=(((time_serie<<1)-time_serie)/((time_serie.getControler()<<1)-time_serie.getControler())+ \
         ((time_serie>>1)-time_serie)/((time_serie.getControler()>>1)-time_serie.getControler()))/2.
    out.setDescription("d(%s)/dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def integral(time_serie):
    """calculates the integral Iv of the time series v using the trapezoidal rule:

    Iv[n]=int_{t_0}^{t_n} v ~ sum_{0<i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])

    """
    out=TimeSeriesCumulativeSum(((time_serie>>1)+time_serie)/2.*(time_serie.getControler()-(time_serie.getControler()>>1)))
    out.setDescription("I (%s) dt"%str(time_serie))
    out.setDebug(time_serie.debug())
    return out

def smooth(time_serie,range=5):
    """smoothes a time series using, at each time, the previous and next range values"""
    i=integral(time_serie)
    out=((i>>range)-(i<<range))/((time_serie.getControler()>>range)-(time_serie.getControler()<<range))
    out.setDescription("smooth(%s,-%d:%d) dt"%(str(time_serie),range,range))
    out.setDebug(time_serie.debug())
    return out

def leakySmooth(time_serie,l=0.99):
    """leaky smoother: s(t)=int_{t_0}^{t} v(r) l^{t-r} dr/ int_{t_0}^{t} l^{t-r} dr"""
    w=l**(-time_serie.getControler())
    out=integral(time_serie*w)/integral(w)
    out.setDescription("leaky smoother(%s)"%str(time_serie))
    return out

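# Example (illustrative sketch, not part of the original module): the helper
# functions build derived series on the same controler; their values become
# available once the controler has flushed, as in the test below.
#
#   cn=Controler(buffer_size=100)
#   d=differential(cn)            # rate of change of the time nodes (~1 here)
#   I=integral(cn*2.)             # running integral of 2*t, roughly t**2
#   for k in range(60): cn.nextTime(k*1.)
#   cn.flush()
#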
# test

if __name__=="__main__":
    # tests the interfaces to data sets:
    print "Test of Datasets:"
    print "================="
    bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBaseBuffer")
    bfv_l=TimeSeriesBaseDataset(bf,offset=1,debug=True,description="offset 1")
    bfv_r=TimeSeriesBaseDataset(bf,offset=-1,debug=True,description="offset -1")
    bf.append([1.,2.,3.,4.])
    print "should be all 2. :",bfv_l[0]
    print bf[1]
    print bfv_r[2]
    bf.append([5.,6.,7.])
    print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
    print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
    print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
    print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]

    print "Test of Controler"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    s3=b>>3
    s1=b>>1
    s_3=b<<3
    print s_3
    print b
    print b+s3
    sum=(s_3+b)+(b+s3)

    for i in range(30):
        b.nextTime(i*1.)
    b.flush()
    print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
    print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
    print "should be all 96. :",sum.getDataset()[24]

    print "Test of operators"
    print "================="
    b=Controler(buffer_size=15,debug=True)
    b.setFlushRate(2)
    q=DataCatcher(b)
    b1=b<<1
    a=b+b1
    a_s=b1+1.
    s_a=1.+b1
    d=b-b1
    d_s=b1-1.
    s_d=1.-b1
    m=b*b1
    m_s=b1*2.
    s_m=2.*b1
    dv=b/b1
    dv_s=b1/2.
    s_dv=2./b1
    p=b**b1
    p_s=b1**2.
    s_p=2.**b1
    pb=+b
    mb=-b
    sum=TimeSeriesCumulativeSum(b)
    diff=differential(b)
    smt=smooth(b,2)
    int=integral(b*2)
    fl=file("/tmp/test.csv","w")
    w=Writer(q,fl)
    v=viewer(q)
    plo=Plotter([a,a_s],window_size=4,file_name="s.ps")
    for i in range(30):
        b.nextTime(i*1.)
        if i%2==1: q.nextValue(i*28.)
    b.flush()
    print "a[28] should be %e: %e"%(28.+29.,a[28])
    print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
    print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
    print "d[28] should be %e: %e"%(28.-29.,d[28])
    print "d_s[28] should be %e: %e"%(29.-1.,d_s[28])
    print "s_d[28] should be %e: %e"%(1.-29.,s_d[28])
    print "m[28] should be %e: %e"%(28.*29.,m[28])
    print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
    print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
    print "dv[28] should be %e: %e"%(28./29.,dv[28])
    print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
    print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
    print "p[28] should be %e: %e"%(28.**29.,p[28])
    print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
    print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
    print "pb[28] should be %e: %e"%(28.,pb[28])
    print "mb[28] should be %e: %e"%(-28.,mb[28])
    print "sum[28] should be %e: %e"%(28*29./2,sum[28])
    print "diff[28] should be %e: %e"%(1.,diff[28])
    print "smt[27] should be %e: %e"%(27.,smt[27])
    print "int[28] should be %e: %e"%(28.**2,int[28])
    print "q[27] should be %e: %e"%(27*28.,q[27])
    print "q[28] should be %e: %e"%(28*28.,q[28])
    print "q[29] should be %e: %e"%(29*28.,q[29])
    fl.flush()

    rin=Reader(file("/tmp/test.csv","r+"),buffer_size=15,debug=True)
    rin.run()
    inp=rin.getTimeSeries()
    print "inp[27] should be %e: %e"%(27*28.,inp[27])
    print "inp[28] should be %e: %e"%(28*28.,inp[28])
    print "inp[29] should be %e: %e"%(29*28.,inp[29])