# $Id$

import numarray

class TimeSeriesBase:
    """The TimeSeriesBase class is the base class for all classes of the TimeSeries module.
    It takes care of updating dependent TimeSeriesBase objects and of the debugging mechanism."""

    def __init__(self):
        self.__debug=False

    def __str__(self):
        return "TimeSeriesBase"

    def setDebugOn(self):
        """switches debugging mode on"""
        self.__debug=True

    def setDebugOff(self):
        """switches debugging mode off"""
        self.__debug=False

    def setDebug(self,flag=False):
        """sets debugging mode to flag"""
        if flag:
            self.setDebugOn()
        else:
            self.setDebugOff()

    def debug(self):
        """returns True if debugging mode is on"""
        return self.__debug

class TimeSeriesFilter(TimeSeriesBase):
    """TimeSeriesFilter objects are applied to TimeSeries objects to filter out information or to convert it.
    A TimeSeriesFilter object is called by the TimeSeries object it depends on to process the values currently in the buffer.
    Some TimeSeriesFilter objects may require values outside the buffer. The TimeSeries object keeps the last buffer_overlap values
    of the previous buffer in the cache so they can be used to process (not necessarily all) values in the buffer."""

    def __init__(self,buffer_overlap=0):
        TimeSeriesBase.__init__(self)
        self.__left_required_extension=buffer_overlap

    def __str__(self):
        return "TimeSeriesFilter"

    def getBufferOverlapNeededForUpdate(self):
        """returns the number of nodes to the left of the buffer needed by update"""
        return self.__left_required_extension

    def update(self,times,values):
        pass

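# Example (an illustrative sketch, not part of the library API): a minimal
# user-defined filter showing how buffer_overlap is used. A filter declaring an
# overlap of 1 receives, once an overlap node is available, one node to the left
# of the current buffer in update(), so quantities spanning two consecutive
# buffers (here: the spacing between time nodes) can be formed. The class name
# TimeSeriesSpacingViewer is made up for this example.
class TimeSeriesSpacingViewer(TimeSeriesFilter):

    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,1)
        time_series.checkInUpdate(self)

    def __str__(self):
        return "TimeSeriesSpacingViewer"

    def update(self,times,values):
        # times may include one overlap node to the left of the buffer
        for i in range(1,times.shape[0]): print "[dt %s -> %s: %s]"%(times[i-1],times[i],times[i]-times[i-1])
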
_DEFAULT_CACHE_SIZE=9
_DEFAULT_BUFFER_SIZE=5
_FLOATING_TYPE=numarray.Float64

class TimeSeries(TimeSeriesBase):
    def __init__(self,buffer_overlap=0,buffer_size=_DEFAULT_BUFFER_SIZE,cache_size=_DEFAULT_CACHE_SIZE,numComponents=1):
        if buffer_size>cache_size: raise ValueError,"buffer size has to be less than or equal to the cache size"
        TimeSeriesBase.__init__(self)
        self.__updates=list()
        self.__max_buffer_overlap=0
        self.__buffer_overlap=0
        self.__numNodes=0
        self.__numNodesInBuffer=0
        self.__numNodesInCache=0
        self.__firstNodeInBuffer=0
        self.__firstNodeInCache=0
        self.__buffer_size=buffer_size
        self.__node_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)
        self.__attachment_cache=[]
        if numComponents<2:
            self.__value_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)
        else:
            self.__value_cache=numarray.zeros((cache_size,numComponents),_FLOATING_TYPE)
        self.resizeMaxBufferOverlap(buffer_overlap)

    def __del__(self):
        self.flush()

    def __str__(self):
        return "TimeSeries"

    def getNumComponents(self):
        """returns the number of components of the values"""
        if self.__value_cache.rank==1:
            return 1
        else:
            return self.__value_cache.shape[1]

    def getNumNodes(self):
        """returns the number of time nodes in the time series"""
        return self.__numNodes

    def getCacheSize(self):
        """returns the cache size"""
        return self.__node_cache.shape[0]

    def getBufferSize(self):
        """returns the buffer size"""
        return self.__buffer_size

    def getNumNodesInCache(self):
        """returns the number of nodes in the cache"""
        return self.__numNodesInCache

    def getNumNodesInBuffer(self):
        """returns the number of nodes in the buffer"""
        return self.__numNodesInBuffer

    def getFirstNodeInCache(self):
        """returns the id number of the first node in the cache"""
        return self.__firstNodeInCache

    def getFirstNodeInBuffer(self):
        """returns the id number of the first node in the buffer"""
        return self.__firstNodeInBuffer

    def getFirstNodeOfBufferInCache(self):
        """returns the location of the first node of the buffer relative to the cache"""
        return self.getFirstNodeInBuffer()-self.getFirstNodeInCache()

    def getBufferOverlap(self):
        """returns the current size of the left extension"""
        return self.__buffer_overlap

    def getMaxBufferOverlap(self):
        """returns the maximum size of the left extension"""
        return self.__max_buffer_overlap

    def resizeMaxBufferOverlap(self,new_buffer_overlap=0):
        if new_buffer_overlap>self.__max_buffer_overlap:
            if self.getNumNodes()>0: raise ValueError,"left extension can only be resized for an empty time series"
            if self.getCacheSize()<self.getBufferSize()+new_buffer_overlap:
                raise ValueError,"Cache size is too small! Required cache size is %s"%(self.getBufferSize()+new_buffer_overlap)
            self.__max_buffer_overlap=new_buffer_overlap
            if self.debug(): print "Debug: %s: left extension is increased to %d"%(self,new_buffer_overlap)

    def getLastNode(self):
        """returns the last time node currently held in the cache"""
        if self.getNumNodesInCache()>0:
            return self.__node_cache[self.getNumNodesInCache()-1]
        else:
            return -1.e300

    def getLastValue(self):
        """returns the value at the last time node currently held in the cache"""
        if self.getNumNodesInCache()>0:
            return self.__value_cache[self.getNumNodesInCache()-1]
        else:
            raise ValueError,"No value available"

    def checkInUpdate(self,time_series_filter):
        """checks in a time_series_filter object to be updated when the buffer is full"""
        if self.getNumNodes()>0:
            raise TypeError,"Check-in of a TimeSeriesFilter requires an empty time series."
        self.__updates.append(time_series_filter)
        self.resizeMaxBufferOverlap(time_series_filter.getBufferOverlapNeededForUpdate())
        if self.debug(): print "Debug: %s: %s checked in successfully."%(self,time_series_filter)

    def append(self,time_nodes,values,attachments=None):
        """appends the time_nodes and values to the buffer"""
        num_additional_nodes=time_nodes.shape[0]
        if num_additional_nodes<1: return
        if self.debug():
            if num_additional_nodes>1:
                print "Debug: %s: values %d to %d are added to the time series."%(self,self.getNumNodes(),self.getNumNodes()+num_additional_nodes-1)
            else:
                print "Debug: %s: value %d is added to the time series."%(self,self.getNumNodes())
        if not num_additional_nodes==values.shape[0]:
            raise ValueError,"Number of time nodes and number of values don't match."
        if self.getLastNode()>=time_nodes[0]:
            raise ValueError,"first time node to be checked in is not greater than the last previously checked-in node"

        if num_additional_nodes>1:
            if min(time_nodes[1:num_additional_nodes]-time_nodes[0:num_additional_nodes-1])<=0:
                raise ValueError,"time nodes have to be strictly increasing"

        # a full cache requires a shift of the buffer (and its overlap) to the beginning of the cache:
        if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():
            new_num_nodes_in_cache=self.getNumNodesInBuffer()+self.getBufferOverlap()
            if new_num_nodes_in_cache+num_additional_nodes>self.getCacheSize():
                raise ValueError,"Cache overflow: Expected size is bigger than %d"%(new_num_nodes_in_cache+num_additional_nodes)
            start=self.getNumNodesInCache()-new_num_nodes_in_cache
            end=start+new_num_nodes_in_cache
            self.__node_cache[0:new_num_nodes_in_cache]=self.__node_cache[start:end]
            self.__value_cache[0:new_num_nodes_in_cache]=self.__value_cache[start:end]
            self.__attachment_cache[0:new_num_nodes_in_cache]=self.__attachment_cache[start:end]
            self.__firstNodeInCache+=start
            self.__numNodesInCache=new_num_nodes_in_cache
            if self.debug(): print "Debug: %s: %d values from %d onwards are moved to the beginning of the cache (first node in cache is now %d)."% \
                                   (self,new_num_nodes_in_cache,start,self.__firstNodeInCache)

        # copy the new values into the cache:
        if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():
            raise ValueError,"Cache overflow: Expected size is bigger than %d"%(self.getNumNodesInCache()+num_additional_nodes)
        if self.debug():
            if num_additional_nodes>1:
                print "Debug: %s: values %d to %d of the cache are updated."%(self,self.getNumNodesInCache(),self.getNumNodesInCache()+num_additional_nodes-1)
            else:
                print "Debug: %s: value %d of the cache is updated."%(self,self.getNumNodesInCache())
        self.__node_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=time_nodes
        self.__value_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=values
        self.__numNodes+=num_additional_nodes
        self.__numNodesInBuffer+=num_additional_nodes
        self.__numNodesInCache+=num_additional_nodes
        if self.debug():
            print self.__node_cache
            print self.__value_cache
        # a full buffer starts the updating process:
        if self.getNumNodesInBuffer()>=self.getBufferSize():
            if self.debug() and len(self.__updates)>0: print "Debug: %s: buffer is full. Updating process is started."%self
            self.processBuffer()

    def flush(self):
        """processes any values still waiting in the buffer"""
        self.processBuffer()

    def processBuffer(self):
        """passes the buffer (extended by the available overlap where requested) to all checked-in filters and empties the buffer"""
        if self.getNumNodesInBuffer()>0:
            for i in self.__updates:
                if self.debug(): print "Debug: %s: update for %s started."%(self,i)
                if i.getBufferOverlapNeededForUpdate()>self.getBufferOverlap():
                    # the requested overlap is not available (yet): pass the buffer only
                    s=self.getFirstNodeOfBufferInCache()
                    l=self.getNumNodesInBuffer()
                else:
                    s=self.getFirstNodeOfBufferInCache()-i.getBufferOverlapNeededForUpdate()
                    l=self.getNumNodesInBuffer()+i.getBufferOverlapNeededForUpdate()
                i.update(self.__node_cache[s:s+l],self.__value_cache[s:s+l])
            self.__firstNodeInBuffer+=self.__numNodesInBuffer
            self.__numNodesInBuffer=0
            self.__buffer_overlap=self.getMaxBufferOverlap()
            if self.debug(): print "Debug: %s: first node in buffer is now %d"%(self,self.__firstNodeInBuffer)

class TimeSeriesCollector(TimeSeries):
    """TimeSeriesCollector collects values at time nodes"""

    def __init__(self):
        TimeSeries.__init__(self)

    def __str__(self):
        return "TimeSeriesCollector"

    def add(self,time_mark,value):
        """adds the value at time time_mark to the time series"""
        self.append(numarray.array([time_mark]),numarray.array([value]))

    def read(self,istream,seperator=","):
        """reads time mark/value pairs, one pair per line, from the input stream istream"""
        for l in istream:
            d=l.strip().split(seperator)
            self.add(float(d[0]),float(d[1]))

class TimeSeriesIntegrator(TimeSeries,TimeSeriesFilter):
    """collects, for each new time interval of the input time series, the trapezoidal-rule increment of its integral"""

    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,1)
        TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \
                            numComponents=time_series.getNumComponents())
        self.setDebug(time_series.debug())
        time_series.checkInUpdate(self)
        self.__integral=0

    def __str__(self):
        return "TimeSeriesIntegrator"

    def update(self,times,values):
        l=times.shape[0]
        self.append(times[1:l],(values[0:l-1]+values[1:l])/2.*(times[1:l]-times[0:l-1]))

class TimeSeriesDifferential(TimeSeries,TimeSeriesFilter):
    """approximates the derivative of the input time series by difference quotients located at the interval midpoints"""

    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,1)
        TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \
                            numComponents=time_series.getNumComponents())
        self.setDebug(time_series.debug())
        time_series.checkInUpdate(self)

    def __str__(self):
        return "TimeSeriesDifferential"

    def update(self,times,values):
        l=times.shape[0]
        self.append((times[0:l-1]+times[1:l])/2,(values[0:l-1]-values[1:l])/(times[0:l-1]-times[1:l]))

class TimeSeriesViewer(TimeSeriesFilter):
    """prints the time node/value pairs of the input time series as they become available"""

    def __init__(self,time_series):
        TimeSeriesFilter.__init__(self,0)
        time_series.checkInUpdate(self)

    def __str__(self):
        return "TimeSeriesViewer"

    def update(self,times,values):
        for i in range(times.shape[0]): print "[%s: %s]"%(times[i],values[i])

class TimeSeriesWriter(TimeSeriesFilter):
    """writes the time node/value pairs of the input time series to the output stream ostream"""

    def __init__(self,time_series,ostream,seperator=","):
        TimeSeriesFilter.__init__(self,0)
        time_series.checkInUpdate(self)
        self.setDebug(time_series.debug())
        self.__ostream=ostream
        self.__seperator=seperator

    def __str__(self):
        return "TimeSeriesWriter"

    def update(self,times,values):
        for i in range(times.shape[0]): self.__ostream.writelines("%s%s%s\n"%(times[i],self.__seperator,values[i]))

# test

if __name__=="__main__":

    c=TimeSeriesCollector()
    c.setDebugOn()
    ii=TimeSeriesIntegrator(c)
    d=TimeSeriesDifferential(c)
    v=TimeSeriesViewer(ii)
    w=TimeSeriesWriter(d,file("test.csv","w"))

    for i in range(15):
        c.add(i*1.,i+1.)
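    # a short illustration of flush() (illustrative addition): adding one more value
    # and flushing pushes the values still sitting in the buffers through the filter
    # chain; otherwise partially filled buffers are only processed when the objects
    # are deleted
    c.add(15.,16.)
    c.flush()
    ii.flush()
    d.flush()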
