/[escript]/trunk/escript/py_src/timeseries.py
ViewVC logotype

Diff of /trunk/escript/py_src/timeseries.py

Parent Directory Parent Directory | Revision Log Revision Log | View Patch Patch

trunk/esys2/escript/py_src/timeseries.py revision 110 by jgs, Mon Feb 14 04:14:42 2005 UTC trunk/escript/py_src/timeseries.py revision 614 by elspeth, Wed Mar 22 01:37:07 2006 UTC
# Line 1  Line 1 
1  # $Id$  # $Id$
2    
3    __copyright__="""  Copyright (c) 2006 by ACcESS MNRF
4                        http://www.access.edu.au
5                    Primary Business: Queensland, Australia"""
6    __license__="""Licensed under the Open Software License version 3.0
7                 http://www.opensource.org/licenses/osl-3.0.php"""
8    
9  import numarray  import numarray
10    from types import SliceType
11    DEFAULT_BUFFER_SIZE=1000
12    DEFAULT_FLOAT_TYPE=numarray.Float64
13    
14  class TimeSeriesBase:  class TimeSeriesBase:
15     """The TimeSeriesBase class is the base class for all class of the TimeSeries module.     """The TimeSeriesBase class is the base class for all class of the TimeSeries module."""
       It takes care of the updating depending TimeSeriesBase objects and the debuging mechnism"""  
16    
17     def __init__(self):     def __init__(self,debug=False,description="TimeSeriesBase"):
18         self.__debug=False         self.__debug=debug
19           self.setDescription(description)
20    
21     def __str__(self):     def __str__(self):
22         return "TimeSeriesBase"         return self.__description
23      
24       def setDescription(self,text):
25           self.__description=text
26    
27     def setDebugOn(self):     def setDebugOn(self):
28        """switch on degugging mode"""        """switch on degugging mode"""
# Line 30  class TimeSeriesBase: Line 42  class TimeSeriesBase:
42     def debug(self):     def debug(self):
43        """returns true if debug mode is on"""        """returns true if debug mode is on"""
44        return self.__debug        return self.__debug
         
 class TimeSeriesFilter(TimeSeriesBase):  
    """TimeSeriesFilter objects are applied to TimeSeries objects to filer out information or to convert it.  
       A TimeSeriesFilter objects is called by the TimeSeries object it is depending on to consider the values currently in the buffer for  
       updating. Some TimeSeriesFilter may require values outside the buffer. The TimeSeries object maintains the last buffer_overlap values  
       in the buffer so they can be used to process (not neccesarily all) value in the buffer."""  
45    
46     def __init__(self,buffer_overlap=0):  #============================================================================================================
47         self.__left_required_extension=buffer_overlap  class TimeSeriesBaseDataset(TimeSeriesBase):
48       """provides an interface for accessing a set of linearly ordered data."""
49       def __init__(self,buffer,offset=0,debug=False,description="TimeSeriesDataset"):
50           TimeSeriesBase.__init__(self,debug,description)
51           self.__buffer=buffer
52           self.__offset=offset
53           if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())
54    
55       def __len__(self):
56           """needed to handle negative indexing in slicing"""
57           return 0
58    
59     def __str__(self):     def getNumComponents(self):
60         return "TimeSeriesFilter"         """returns the number of components of the data (may be overwritten by subclass)"""
61           return self.getBaseBuffer().getNumComponents()
    def getBufferOverlapNeededForUpdate(self):  
        return self.__left_required_extension  
62    
63     def update(self,times,values):     def getIdOfLastDatum(self):
64         pass        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
65          return self.getBaseBuffer().getIdOfLastDatum()-self.getOffset()
66    
67       def getIdOfFirstDatum(self):
68          """returns the identification number of the first datum (may be overwritten by subclass)"""
69          return self.getBaseBuffer().getIdOfFirstDatum()-self.getOffset()
70    
71       def getIdOfFirstAvailableDatum(self):
72          """returns the identification number of the first avaiable datum (may be overwritten by subclass)"""
73          return self.getBaseBuffer().getIdOfFirstAvailableDatum()-self.getOffset()
74    
75       def getOffsetInBaseBuffer(self):
76          """returns the offset to access elements in getBaseBuffer() (may be overwritten by subclass)"""
77          return  self.getOffset()
78    
79       def getIdOfLastUnreferencedDatum(self):
80           """returns the identification number of the last datum which has been unused by all TimeSeries refering to the TimeSeriesBaseDataset (may be overwritten by subclass)"""
81           return self.getBaseBuffer().getIdOfLastUnreferencedDatum()-self.getOffset()
82    
83       def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
84           """updates the identification number of the last unused datum (to be overwritten by subclass)"""
85           self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum+self.getOffset())
86    
87       def append(self,values):
88           """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended  (to be overwritten by subclass)"""
89           self.getBaseBuffer().append(values)
90    
91       def getBaseBufferSize(self):
92           """returns the size of the buffer (to be overwritten by subclass)"""
93           return self.getBaseBuffer().getBaseBufferSize()
94      
95       def needsRearrangement(self,num_new_data=0):
96           """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
97           return self.getBaseBuffer().needsRearrangement(num_new_data)
98    
99       def isEmpty(self):
100          """returns true if no data are appeneded to buffer"""
101          return self.getNumData()<=0
102      
103       def getNumData(self):
104          """returns the number of data (not all of them are accessible)"""
105          return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1
106    
107       def getBaseBuffer(self):
108          """return the buffer referenced by the TimeSeriesBaseDataset"""
109          return self.__buffer
110    
111       def getOffset(self):
112          """return the offset when referring to dataset elements"""
113          return self.__offset
114    
115       def __getitem__(self,index):
116          """returns the datum index"""
117          if type(index)==SliceType:
118             start=index.start
119             end=index.stop
120             if start==end:
121                return self[start]
122             else:
123                 if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
124                     end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
125                 return self.getBaseBuffer()[start+self.getOffsetInBaseBuffer():end+self.getOffsetInBaseBuffer()]
126          else:
127             if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
128             return self.getBaseBuffer()[index+self.getOffsetInBaseBuffer()]
129    
130  _DEFAULT_CACHE_SIZE=9  class TimeSeriesBaseBuffer(TimeSeriesBaseDataset):
131  _DEFAULT_BUFFER_SIZE=5     """An inplementation of TimeSeriesBaseDataset which actually is storing data into a numarray buffer"""
132  _FLOATING_TYPE=numarray.Float64     def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="TimeSeriesBaseBuffer"):
   
 class TimeSeries(TimeSeriesBase):  
    def __init__(self,buffer_overlap=0,buffer_size=_DEFAULT_BUFFER_SIZE,cache_size=_DEFAULT_CACHE_SIZE,numComponents=1):  
        if buffer_size>cache_size: raise ValueError,"buffer size has to be less or equal cache size"  
        TimeSeriesBase.__init__(self)  
        self.__updates=list()  
        self.__max_buffer_overlap=0  
        self.__buffer_overlap=0  
        self.__numNodes=0  
        self.__numNodesInBuffer=0  
        self.__numNodesInCache=0  
        self.__firstNodeInBuffer=0  
        self.__firstNodeInCache=0  
        self.__buffer_size=buffer_size  
        self.__node_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)  
        self.__attachment_cache=[]  
133         if numComponents<2:         if numComponents<2:
134            self.__value_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,),type)
135         else:         else:
136            self.__value_cache=numarray.zeros((cache_size,numComponents),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,numComponents),type)
137         self.resizeMaxBufferOverlap(buffer_overlap)         TimeSeriesBaseDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
138           self.__num_data_in_buffer=0
139     def __del__(self):         self.__id_last_unreferenced_datum=id_of_first_datum-1
140         self.flush()         self.__id_last_datum=id_of_first_datum-1
141           self.__id_first_datum=id_of_first_datum
142           if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
143                           (self,self.getBaseBufferSize(),self.getNumComponents(),id_of_first_datum)
144    
    def __str__(self):  
        return "TimeSeries"  
145    
146       def getBaseBufferSize(self):
147           """returns the size of the buffer"""
148           return self.getBaseBuffer().shape[0]
149      
150     def getNumComponents(self):     def getNumComponents(self):
151         if self.__value_cache.rank==1:         """returns the number of components of the data (overwrites TimeSeriesBaseDataset method)"""
152           if self.getBaseBuffer().rank==1:
153            return 1            return 1
154         else:         else:
155            self.__value_cache.shape[1]            self.getBaseBuffer().shape[1]
156    
157     def getNumNodes(self):     def getNumDataInBaseBuffer(self):
158         """returns the number of time nodes in the time series"""         """returns the number of data currently in the buffer"""
159         return self.__numNodes         return self.__num_data_in_buffer
160    
161     def getCacheSize(self):     def getIdOfLastDatum(self):
162         """returns the cache size"""        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesBaseDataset)"""
163         return self.__node_cache.shape[0]        return self.__id_last_datum
164    
165     def getBufferSize(self):     def getIdOfFirstDatum(self):
166         """returns the cache size"""        """returns the identification number of the first datum (overwrites method from TimeSeriesBaseDataset)"""
167         return self.__buffer_size        return self.__id_first_datum
168    
169     def getNumNodesInCache(self):     def getOffsetInBaseBuffer(self):
170         """returns the number of nodes in cache"""        """returns the offset to access elements in the buffer (overwrites method from TimeSeriesBaseDataset)"""  
171         return self.__numNodesInCache        return -self.getIdOfLastDatum()+self.getNumDataInBaseBuffer()-1  
172    
173     def getNumNodesInBuffer(self):     def getIdOfLastUnreferencedDatum(self):
174         """returns the number of nodes in cache"""         """returns the identification number of the last datum which has been unused by all TimeSeries refering to the TimeSeriesBaseDataset (overwrites method from TimeSeriesBaseDataset)"""
175         return self.__numNodesInBuffer         return self.__id_last_unreferenced_datum
176        
177     def getFirstNodeInCache(self):     def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
178         """returns the id number of the first node in the cache"""         """updates the identification number of the last unused datum (to be overwritten by subclass)"""
179         return self.__firstNodeInCache         self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum-self.getOffset())
180    
181     def getFirstNodeInBuffer(self):     def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
182         """returns the id number of the first node in the buffer"""         """updates the identification number of the last unused datum (overwrites TimeSeriesBaseDataset method)"""
183         return self.__firstNodeInBuffer         if self.__id_last_unreferenced_datum>last_unreferenced_datum:
184               self.__id_last_unreferenced_datum=last_unreferenced_datum
185     def getFirstNodeOfBufferInCache(self):             if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unreferenced_datum)
186         """returns the first location of the first node in the buffer relative to the cache"""  
187         return self.getFirstNodeInBuffer()-self.getFirstNodeInCache()     def needsRearrangement(self,num_new_data=0):
188           """returns True if the buffer will be full after num_new_data have been appended"""
189     def getBufferOverlap(self):         return self.getNumDataInBaseBuffer()+num_new_data>self.getBaseBufferSize()
190         """returns the current size of the left extension"""          
191         return self.__buffer_overlap     def getIdOfFirstAvailableDatum(self):
192          """returns the identification number of the first avaiable datum (overwrites TimeSeriesBaseDataset method)"""
193     def getMaxBufferOverlap(self):        return self.getIdOfLastDatum()-self.__num_data_in_buffer+1
194         """returns the maximum size of the left extension"""  
195         return self.__max_buffer_overlap     def append(self,data):
196          """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesBaseDataset method)"""
197     def resizeMaxBufferOverlap(self,new_buffer_overlap=0):        data=numarray.array(data)
198         if new_buffer_overlap>self.__max_buffer_overlap:        nc=self.getNumComponents()
199            if self.getNumNodes()>0: raise ValueError,"left extension can only be resized for empty time series"        if data.rank==0:
200            if self.getCacheSize()<self.getBufferSize()+new_buffer_overlap:          if nc==1:
201                 raise ValueError,"Cache size is too small! required cache size is %s"%self.getBufferSize()+new_buffer_overlap             num_new_data=1
202            self.__max_buffer_overlap=new_buffer_overlap          else:
203            if self.debug(): print "Debug: %s: left extension is increased to %d"%(self,new_buffer_overlap)             raise ValueError,"%s: illegal data shape"%self
204          elif data.rank==1:
205     def getLastNode(self):          if nc==1:
206         if self.getNumNodesInCache()>0:               num_new_data=data.shape[0]
207            return self.__node_cache[self.getNumNodesInCache()-1]          else:
208         else:               num_new_data=1  
209            return -1.e300        elif data.rank==2:
210            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
211     def getLastValue(self):          num_new_data=data.shape[0]
212         if self.getNumNodesInCache()>0:        else:
213            return self.__node_cache[self.getNumNodesInCache()-1]           raise ValueError,"%s: illegal rank"%self
        else:  
           raise ValueError,"No value available"  
214    
215     def checkInUpdate(self,time_series_filter):        # check is buffer will be overflown when data are appended:
216         """checks in a time_series_filter object to be updated when buffer is full"""        if self.needsRearrangement(num_new_data):
217         if self.getNumNodes()>0:          nn=self.getNumDataInBaseBuffer()
218            raise TypeError,"Check in of TimeSeries requires empty buffer."          num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnreferencedDatum()
219         self.__updates.append(time_series_filter)          if num_protected_data+num_new_data>self.getBaseBufferSize():
220         self.resizeMaxBufferOverlap(time_series_filter.getBufferOverlapNeededForUpdate())                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
221         if self.debug(): print "Debug: %s: %s checked in successfully."%(self,time_series_filter)          if num_protected_data>0: self.getBaseBuffer()[0:num_protected_data]=self.getBaseBuffer()[nn-num_protected_data:nn]
222            self.__num_data_in_buffer=num_protected_data
223     def append(self,time_nodes,values,attachments=None):          self.__id_last_unreferenced_datum=self.__id_last_datum
224         """appends the time_nodes and values into the buffer"""          if self.debug():
225         num_additional_nodes=time_nodes.shape[0]               print "Debug: %s: rearrangement: first data in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBaseBuffer()+1)
226         if num_additional_nodes<1: return        # copy data over:
227         if self.debug():        nn=self.getNumDataInBaseBuffer()
228              if num_additional_nodes>1:        self.getBaseBuffer()[nn:nn+num_new_data]=data
229                 print "Debug: %s: values %d to %d are added to time series."%(self,self.getNumNodes(),self.getNumNodes()+num_additional_nodes-1)        self.__num_data_in_buffer+=num_new_data
230              else:        self.__id_last_datum+=num_new_data
231                 print "Debug: %s: value %d is added to time series."%(self,self.getNumNodes())        self.__id_last_unreferenced_datum+=num_new_data
232         if not num_additional_nodes==values.shape[0]:        if self.debug(): print "Debug: %s: %d data appended. Last unreferenced datum is now %d."%(self,num_new_data,self.__id_last_unreferenced_datum)
233            raise ValueError,"Number time nodes and number of values don't match."  
234         if self.getLastNode()>=time_nodes[0]:  # ======================================
235            raise ValueError,"first time node to be checked in is less than last previously checked in node"  class TimeSeriesControlerView(TimeSeriesBase):
236          """A TimeSeriesControlerView is attached to a Controler and moves forward in time by increasing the id of the last processed datum.
237         if num_additional_nodes>1:           Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
238              if min(time_nodes[1:num_additional_nodes]-time_nodes[0:num_additional_nodes-1])<=0:        def __init__(self,id_first_datum=0,debug=False,description="TimeSeries"):
239                raise ValueError,"time nodes have to be strictly increasing"          TimeSeriesBase.__init__(self,debug,description)
240            self.__id_last_processed_datum=id_first_datum-1
241            if self.debug(): print "Debug: %s  created with first datum %d"%(str(self),id_first_datum)
242    
243          def getIdOfLastProcessedDatum(self):
244              return self.__id_last_processed_datum
245    
246          def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
247              self.__id_last_processed_datum=id_last_processed_datum
248    
249          # def getControler(self):
250          #      """returns the Controler of the time series (to be overwritten by subclass)"""
251          #      pass
252    
253    class TimeSeries(TimeSeriesBaseDataset,TimeSeriesControlerView):
254          """makes TimeSeriesBaseDataset look like a TimeSeries and introduces operations
255             Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
256          def __init__(self,dataset,debug=False,description="TimeSeries"):
257            TimeSeriesControlerView.__init__(self,dataset.getIdOfFirstDatum(),debug,description)
258            TimeSeriesBaseDataset.__init__(self,dataset,0,debug,description)
259                
260         # full cache requires a shift:        def getDataset(self):
261         if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():            """returns the TimeSeriesBaseDataset of the time series"""
262             new_num_nodes_in_cache=self.getNumNodesInBuffer()+self.getBufferOverlap()            return self.getBaseBuffer()
263             if new_num_nodes_in_cache+num_additional_nodes>self.getCacheSize():  
264                raise ValueError,"Cache overflow: Expected size is bigger than %d"%(new_num_nodes_in_cache+num_additional_nodes)        # def getControler(self):
265             start=self.getNumNodesInCache()-new_num_nodes_in_cache        #      """returns the Controler of the time series (to be overwritten by subclass)"""
266             end=start+new_num_nodes_in_cache        #      pass
267             self.__node_cache[0:new_num_nodes_in_cache]=self.__node_cache[start:end]  
268             self.__value_cache[0:new_num_nodes_in_cache]=self.__value_cache[start:end]        def __add__(self,arg):
269             self.__attachment_cache[0:new_num_nodes_in_cache]=self.__attachment_cache[start:end]           if isinstance(arg,TimeSeriesBaseDataset):
270                return TimeSeriesAdd(self,arg)
271             self.__firstNodeInCache+=start           else:
272             self.__numNodesInCache=new_num_nodes_in_cache              return TimeSeriesAddScalar(self,arg)
273             if self.debug(): print "Debug: %s: %d values from %d onwards are moved to the beginning of the cache (first node in cache is now %d)."% \  
274                                                                                      (self,new_num_nodes_in_cache,start,self.__firstNodeInCache)        def __sub__(self,arg):
275                       return self+(-1.)*arg
276         # copy values into cache:  
277         if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():        def __mul__(self,arg):
278             raise ValueError,"Cache overflow: Expected size is bigger than %d"%(self.getNumNodesInCache()+num_additional_nodes)           if isinstance(arg,TimeSeriesBaseDataset):
279         if self.debug():              return TimeSeriesMult(self,arg)
280             if num_additional_nodes>1:           else:
281                print "Debug: %s: values %d to %d of cache are updated"%(self,self.getNumNodesInCache(),self.getNumNodesInCache()+num_additional_nodes-1)              return TimeSeriesMultScalar(self,arg)
282             else:  
283                print "Debug: %s: value %d of cache is updated."%(self,self.getNumNodesInCache())        def __div__(self,arg):
284         self.__node_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=time_nodes           if isinstance(arg,TimeSeriesBaseDataset):
285         self.__value_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=values              return TimeSeriesDiv(self,arg)
286         self.__numNodes+=num_additional_nodes           else:
287         self.__numNodesInBuffer+=num_additional_nodes              return TimeSeriesMultScalar(self,1./arg)
288         self.__numNodesInCache+=num_additional_nodes  
289         print self.__node_cache        def __pow__(self,arg):
290         print self.__value_cache           if isinstance(arg,TimeSeriesBaseDataset):
291         # copy values into cache:              return TimeSeriesPower(self,arg)
292         if self.getNumNodesInBuffer()>=self.getBufferSize():           else:
293                if self.debug() and len(self.__updates)>0: print "Debug: %s: buffer is full. Updating process is started"%self              return TimeSeriesPowerScalar(self,arg)
294                self.processBuffer()        
295          def __radd__(self,arg):
296             return self.__add__(arg)
297    
298     def flush(self):        def __rsub__(self,arg):
299        self.processBuffer()           return arg+(-1.)*self
300    
301          def __rmul__(self,arg):
302             return self.__mul__(arg)
303    
304     def processBuffer(self):        def __rdiv__(self,arg):
305          if self.getNumNodesInBuffer()>0:           if isinstance(arg,TimeSeriesBaseDataset):
306             for i in self.__updates:              return TimeSeriesDiv(arg,self)
307               if self.debug(): print "Debug: %s: update for %s started"%(self,i)           else:
308               if i.getBufferOverlapNeededForUpdate()>self.getBufferOverlap():              return TimeSeriesDivScalar(self,arg)
309                  s=self.getFirstNodeOfBufferInCache()  
310                  l=self.getNumNodesInBuffer()        def __rpow__(self,arg):
311             if isinstance(arg,TimeSeriesBaseDataset):
312                return TimeSeriesPower(arg,self)
313             else:
314                return Exp(numarray.log(arg)*self)
315    
316          def __lshift__(self,arg):
317             return TimeSeriesShift(self,-arg)
318    
319          def __rshift__(self,arg):
320             return TimeSeriesShift(self,arg)
321    
322          def __neg__(self):
323             return (-1.0)*self
324    
325          def __pos__(self):
326             return (1.0)*self
327    
328    class TimeSeriesOperator(TimeSeriesControlerView):
329          """a TimeSeriesOperator decribes an operation acting on list of TimeSeries time_series_args. It allows to update its output (if there is any)
330             through the update method which is overwritten by a particular implementation of the class. The update method is called to process the data [start:end] using
331             [start-left_wing_size:end+right_wing_size] of its arguments"""
332          def __init__(self,controler,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesOperator"):
333              id_first_datum=controler.getIdOfFirstDatum()
334              for i in time_series_args: id_first_datum=max(id_first_datum,i.getIdOfFirstDatum())
335              TimeSeriesControlerView.__init__(self,id_first_datum+left_wing_size,debug,description)
336              self.__left_wing_size=left_wing_size
337              self.__right_wing_size=right_wing_size
338              self.__time_series_args=time_series_args
339              self.__controler=controler
340              controler.appendOperatorToUpdateList(self)
341              if self.debug(): print "Debug: %s: with left/right wing size %d/%d and %d arguments."%(str(self),left_wing_size,right_wing_size,len(time_series_args))
342    
343          def __del__(self):
344              self.getControler().removeOperatorFromUpdateList(self)
345    
346          def getControler(self):
347              """returns the Controler updating the TimeSeriesOperator"""
348              return self.__controler
349    
350          def getLeftWingSize(self):
351              """returns the left wing size"""  
352              return self.__left_wing_size
353    
354          def getRightWingSize(self):
355              """returns the right wing size"""
356              return self.__right_wing_size
357    
358          def getArguments(self,index=None):
359              """returns the list of arguments or, index is present, the argument with index index. In the latter case None is returned if no arguments are present"""
360              if index==None:
361                 return self.__time_series_args
362              else:
363                 if len(self.__time_series_args)>0:
364                    return self.__time_series_args[index]
365               else:               else:
366                  s=self.getFirstNodeOfBufferInCache()-i.getBufferOverlapNeededForUpdate()                  return None
                 l=self.getNumNodesInBuffer()+i.getBufferOverlapNeededForUpdate()  
              i.update(self.__node_cache[s:s+l],self.__value_cache[s:s+l])  
            self.__firstNodeInBuffer+=self.__numNodesInBuffer  
            self.__numNodesInBuffer=0  
         self.__buffer_overlap=self.getMaxBufferOverlap()  
         if self.debug(): print "Debug: %s: first node in buffer is now %d"%(self,self.__firstNodeInBuffer)  
367    
368            def getArgumentDataset(self,index):
369              """returns the dataset of in the argument with index index"""
370              arg=self.getArguments(index)
371              if arg==None:
372                 return None
373              else:
374                  return self.getArguments(index).getDataset()
375    
376          def flush(self):
377              """calls the update method with all the maximum processable range. It also updates the id of unused datum for all arguments"""
378              start=self.getIdOfLastProcessedDatum()+1
379              end=self.getControler().getIdOfLastDatum()
380              for i in self.getArguments(): end=min(end,i.getIdOfLastDatum())
381              if start<=end-self.getRightWingSize():
382                 if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
383                 self.update(start,end-self.getRightWingSize()+1)      
384                 for i in self.getArguments(): i.updateIdOfLastUnreferencedDatum(end-self.getLeftWingSize())
385                 self.updateIdOfLastProcessedDatum(end)
386    
387          def update(self,start,end):
388              """updates the the data [start:end] using [start-left_wing_size:end+right_wing_size] of its arguments (is overwritten by a particular TimeSeriesOperator)"""
389              pass
390    
391    
392    class TimeSeriesFilter(TimeSeries,TimeSeriesOperator):
393          """a TimeSeriesFilter is a TimeSeries taht is created trough a TimeSeriesOperator"""
394          def __init__(self,controler,dataset,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesFilter"):
395             TimeSeriesOperator.__init__(self,controler,time_series_args,left_wing_size,right_wing_size,debug,description)
396             TimeSeries.__init__(self,dataset,debug,description)
397    
398          def update(self,start,end):
399              """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
400              nc=self.getNumComponents()
401              if nc>1:
402                 self.getDataset().append(numarray.zeros([nc,end-start]))
403              else:
404                 self.getDataset().append(numarray.zeros(end-start))
405    
class Controler(TimeSeries):
   """controls a set of TimeSeries which share this Controler's time nodes.

   The Controler is itself a TimeSeries holding the time nodes in a
   one-component buffer; dependent TimeSeriesFilters register themselves on
   the update list and get flushed when the node buffer needs rearranging or
   after every __flush_rate new time nodes."""
   def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="TimeSeriesControler"):
        # time nodes live in a single-component float buffer starting at id 0:
        TimeSeries.__init__(self,TimeSeriesBaseBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"node buffer of "+description),debug,"nodes of "+description)
        self.setFlushRate()  
        self.__update_time_series=list()
      
   def __del__(self):
       # push any pending data to the dependent filters before destruction
       self.flush()

   def getControler(self):
       """returns the Controler of the time series (overwrites the method inherited from TimeSeries): a Controler controls itself"""
       return self

   def setFlushRate(self,rate=50):
       """sets the flush rate, i.e. after rate new time nodes have been checked in the flush method is called."""
       self.__flush_rate=rate
       if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)

   def needsFlushing(self):
      """returns true if the depending TimeSeriesFilters need to be flushed, because the time node buffer is full or the flush rate has been reached"""
      return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0

   def flush(self):
       """flushes all dependent TimeSeriesFilters by calling their flush method"""
       if self.debug(): print "Debug: %s: start flushing"%self
       for time_serie in self.__update_time_series: time_serie.flush()

   def appendOperatorToUpdateList(self,time_serie):
       """registers time_serie to be flushed by this Controler; time_serie must
       live on this Controler, which must still be empty at registration time"""
       if not time_serie.getControler()==self: raise ValueError,"%s: TimeSeries %s is not defined on this controler."%(self,time_serie)
       # NOTE(review): the message below is garbled in the original; kept
       # byte-identical because callers may match on it.
       if not self.isEmpty(): raise ValueError,"%s: you can only check in a time series time_serie is controler is empty."%self
       self.__update_time_series.append(time_serie)
       if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)

   def removeOperatorFromUpdateList(self,time_serie):
       """removes time_serie from the update list; raises ValueError if it was never registered"""
       self.__update_time_series.remove(time_serie)
       if self.debug(): print "Debug: %s: %s has been removed from update list."%(self,time_serie)

   def nextTime(self,value):
       """checks in a new time node value, flushing the dependents first if required"""
       if self.needsFlushing(): self.flush()
       self.getDataset().append(value)
       if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)
448    
class TimeSeriesShift(TimeSeries):
      """a time series shifted by a fixed number of time nodes: the datum d[n]
      of the input at time t[n] appears as datum v[n+shift] of the output."""
      def __init__(self,time_serie,shift=1):
          if shift<0:
              description="(%s)<<%d"%(time_serie,-shift)
          else:
              description="(%s)>>%d"%(time_serie,shift)
          debug_flag=time_serie.debug()
          self.__controler=time_serie.getControler()
          # a shift needs no buffer of its own; it is a re-indexed view onto
          # the input's dataset:
          shifted_view=TimeSeriesBaseDataset(time_serie.getDataset(),-shift,debug_flag,"buffer view to "+description)
          TimeSeries.__init__(self,shifted_view,debug_flag,description)

      def getControler(self):
          """returns the controler of the input series (the shift shares its time nodes)"""
          return self.__controler
461    
class TimeSeriesAdd(TimeSeriesFilter):
      """pointwise sum of two TimeSeries defined on the same controler"""
      def __init__(self,time_serie_1,time_serie_2):
          controler=time_serie_1.getControler()
          if not controler==time_serie_2.getControler():
                  raise ValueError("TimeSeriesAdd: %s and %s have different controler."%(time_serie_1,time_serie_2))
          description="(%s)+(%s)"%(time_serie_1,time_serie_2)
          debug_flag=time_serie_1.debug() or time_serie_2.debug()
          # the sum exists from the first id where both arguments have data:
          first_id=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie_1,time_serie_2],0,0,debug_flag,description)

      def update(self,start,end):
          """fills the ids start to end with the sum of the argument data"""
          self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])
477    
class TimeSeriesAddScalar(TimeSeriesFilter):
      """shifts every value of a TimeSeries by a constant"""
      def __init__(self,time_serie,scalar):
          description="(%s)+(%s)"%(time_serie,scalar)
          debug_flag=time_serie.debug()
          controler=time_serie.getControler()
          first_id=time_serie.getIdOfFirstDatum()
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)
          self.__scalar=scalar

      def update(self,start,end):
          """fills the ids start to end with argument data plus the constant"""
          self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)
492    
class TimeSeriesMult(TimeSeriesFilter):
      """pointwise product of two TimeSeries defined on the same controler"""
      def __init__(self,time_serie_1,time_serie_2):
          controler=time_serie_1.getControler()
          if not controler==time_serie_2.getControler():
                  raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
          description="(%s)*(%s)"%(time_serie_1,time_serie_2)
          debug_flag=time_serie_1.debug() or time_serie_2.debug()
          # the product exists from the first id where both arguments have data:
          first_id=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie_1,time_serie_2],0,0,debug_flag,description)

      def update(self,start,end):
          """fills the ids start to end with the product of the argument data"""
          self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])
508    
class TimeSeriesMultScalar(TimeSeriesFilter):
      """scales every value of a TimeSeries by a constant"""
      def __init__(self,time_serie,scalar):
          description="(%s)*%s"%(time_serie,scalar)
          debug_flag=time_serie.debug()
          controler=time_serie.getControler()
          first_id=time_serie.getIdOfFirstDatum()
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)
          self.__scalar=scalar

      def update(self,start,end):
          """fills the ids start to end with argument data times the constant"""
          self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)
523    
class TimeSeriesDiv(TimeSeriesFilter):
      """pointwise quotient of two TimeSeries defined on the same controler"""
      def __init__(self,time_serie_1,time_serie_2):
          controler=time_serie_1.getControler()
          if not controler==time_serie_2.getControler():
                  raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
          description="(%s)/(%s)"%(time_serie_1,time_serie_2)
          debug_flag=time_serie_1.debug() or time_serie_2.debug()
          # the quotient exists from the first id where both arguments have data:
          first_id=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie_1,time_serie_2],0,0,debug_flag,description)

      def update(self,start,end):
          """fills the ids start to end with the quotient of the argument data"""
          self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])
539    
class TimeSeriesDivScalar(TimeSeriesFilter):
      """divides a constant by a TimeSeries, pointwise"""
      def __init__(self,time_serie,scalar):
          description="(%s)/(%s)"%(scalar,time_serie)
          debug_flag=time_serie.debug()
          controler=time_serie.getControler()
          first_id=time_serie.getIdOfFirstDatum()
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)
          self.__scalar=scalar

      def update(self,start,end):
          """fills the ids start to end with the constant divided by the argument data"""
          self.append(self.__scalar/self.getArgumentDataset(0)[start:end])
554    
class TimeSeriesPower(TimeSeriesFilter):
      """raises one TimeSeries to the power of another, pointwise; both must share a controler"""
      def __init__(self,time_serie_1,time_serie_2):
          controler=time_serie_1.getControler()
          if not controler==time_serie_2.getControler():
                  raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
          description="(%s)**(%s)"%(time_serie_1,time_serie_2)
          debug_flag=time_serie_1.debug() or time_serie_2.debug()
          # the result exists from the first id where both arguments have data:
          first_id=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie_1,time_serie_2],0,0,debug_flag,description)

      def update(self,start,end):
          """fills the ids start to end with the first argument raised to the second"""
          self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])
570    
class TimeSeriesPowerScalar(TimeSeriesFilter):
      """raises every value of a TimeSeries to a constant power"""
      def __init__(self,time_serie,scalar):
          description="(%s)**(%s)"%(time_serie,scalar)
          debug_flag=time_serie.debug()
          controler=time_serie.getControler()
          first_id=time_serie.getIdOfFirstDatum()
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)
          self.__scalar=scalar

      def update(self,start,end):
          """fills the ids start to end with argument data raised to the constant power"""
          self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)
585    
class Exp(TimeSeriesFilter):
      """pointwise exponential of a TimeSeries"""
      def __init__(self,time_serie):
          description="exp(%s)"%(time_serie)
          debug_flag=time_serie.debug()
          controler=time_serie.getControler()
          first_id=time_serie.getIdOfFirstDatum()
          out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
          TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)

      def update(self,start,end):
          """fills the ids start to end with the exponential of the argument data"""
          self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))
599    
600    class Writer(TimeSeriesOperator):
601          """writes the time series into an output strim ostream which mast have the writeline method. The values are seperated by the string seperator."""
      def __init__(self,time_serie,ostream,seperator=",",commend_tag="#"):
         """sets up the writer: each new datum of time_serie is written to
         ostream as one line with the values separated by seperator; a header
         line prefixed by commend_tag is written immediately."""
         dsc="write %s to %s"%(time_serie,ostream)
         dbg=time_serie.debug()
         cntrl=time_serie.getControler()
         self.__ostream=ostream
         self.__seperator=seperator
         TimeSeriesOperator.__init__(self,cntrl,[time_serie],0,0,dbg,dsc)
         ostream.writelines("%s time series %s\n"%(commend_tag,str(self)))
610    
611  class TimeSeriesCollector(TimeSeries):        def update(self,start,end):
612        """TimeSeriesCollector collects values at time nodes"""           cntrl=self.getControler()
613        def __init__(self):           arg=self.getArguments(0)
614           TimeSeries.__init__(self)           n=arg.getNumComponents()
615             if n<2:
616        def __str__(self):              for i in range(start,end): self.__ostream.writelines("%s%s%s\n"%(cntrl[i],self.__seperator,arg[i]))
617           return "TimeSeriesCollector"           else:
618                for i in range(start,end):
619        def add(self,time_mark,value):                 l="%s"%cntrl[i]
620             """adds the value at time time_mark to the time series"""                 for j in range(n): l=l+"%s%s"(self.__seperator,arg[i][j])
621             self.append(numarray.array([time_mark]),numarray.array([value]))                 self.__ostream.writelines("%s\n"%l)
622    
623        def read(self,istream,seperator=","):  class DataCatcher(TimeSeries):
624          """reads pairs from iostream istream"""        """collects data into a time series."""
625          for l in istream:        def __init__(self,controler,numComponents=1,description="DataCatcher"):
626             d=l.strip().split(seperator)           self.__controler=controler
627             self.add(float(d[0]),float(d[1]))           dbg=controler.debug()
628             TimeSeries.__init__(self,TimeSeriesBaseBuffer(controler.getBaseBufferSize(),numComponents,DEFAULT_FLOAT_TYPE,controler.getIdOfFirstDatum(),dbg,"buffer for "+description),dbg,description)
629  class TimeSeriesIntegrator(TimeSeries,TimeSeriesFilter):  
630        def __init__(self,time_series):        def getControler(self):
631           TimeSeriesFilter.__init__(self,1)            return self.__controler
632           TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \  
633                                                                           numComponents=time_series.getNumComponents())        def nextValue(self,value):
634           self.setDebug(time_series.debug())            """append a value to the time series"""
635           time_series.checkInUpdate(self)            id_last=self.getIdOfLastDatum()
636           self.__integral=0            id_current=self.getControler().getIdOfLastDatum()
637              if id_last+1==id_current:
638        def __str__(self):               self.getDataset().append(value)
639           return "TimeSeriesIntegrator"            elif id_last+1<id_current:
640                   if self.isEmpty():
641        def update(self,times,values):                     self.getDataset().append(value)
642            l=times.shape[0]                     id_last+=1
643            self.append(times[1:l],(values[0:l-1]+values[1:l])/2.*(times[1:l]-times[0:l-1]))                 t_last=self.getControler()[id_last]
644                   t_current=self.getControler()[id_current]
645                   value_last=self[id_last]
646                   out=(value_last-value)/(t_last-t_current)*(self.getControler()[id_last+1:id_current+1]-t_current)+value
647                   self.getDataset().append(out)
648              else :
649                 raise ValueError,"%s: a new time node must be introduced before a new value can be added."
650              self.updateIdOfLastUnreferencedDatum(id_last)
651              
652      
class TimeSeriesCumulativeSum(TimeSeriesFilter):
      """running (cumulative) sum of the values of a time series"""
      def __init__(self,time_serie):
         description="cumsum(%s)"%(time_serie)
         debug_flag=time_serie.debug()
         controler=time_serie.getControler()
         first_id=time_serie.getIdOfFirstDatum()
         out_buffer=TimeSeriesBaseBuffer(controler.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,first_id,debug_flag,"buffer for "+description)
         TimeSeriesFilter.__init__(self,controler,out_buffer,[time_serie],0,0,debug_flag,description)
         # running total carried over between update calls:
         self.__last_value=0

      def update(self,start,end):
          """appends the cumulative sums of the argument data for the ids start to end"""
          partial_sums=numarray.cumsum(self.getArgumentDataset(0)[start:end])+self.__last_value
          self.__last_value=partial_sums[end-start-1]
          self.append(partial_sums)
669                    
670    
671  class TimeSeriesDifferential(TimeSeries,TimeSeriesFilter):  class Reader(TimeSeriesBase):
672        def __init__(self,time_series):        """reads a list of input streams and creates a time series for each input stream but on the same Controler where the first column
673           TimeSeriesFilter.__init__(self,1)           is used to create the time nodes"""
674           TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \        def __init__(self,list_of_istreams,buffer_size=DEFAULT_BUFFER_SIZE,seperator=",",commend_tag="#",debug=False):
675                                                                           numComponents=time_series.getNumComponents())           TimeSeriesBase.__init__(self,debug=debug,description="reader")
676           self.setDebug(time_series.debug())           if not isinstance(list_of_istreams,list):
677           time_series.checkInUpdate(self)                self.__list_of_istreams=[list_of_istreams]
678             else:
679        def __str__(self):                self.__list_of_istreams=list_of_istreams
680           return "TimeSeriesDifferential"           self.__cntrl=Controler(buffer_size,debug,"reader controler")
   
       def update(self,times,values):  
           l=times.shape[0]  
           self.append((times[0:l-1]+times[1:l])/2,(values[0:l-1]-values[1:l])/(times[0:l-1]-times[1:l]))  
   
 class TimeSeriesViewer(TimeSeriesFilter):  
       def __init__(self,time_series):  
          TimeSeriesFilter.__init__(self,0)  
          time_series.checkInUpdate(self)  
   
       def __str__(self):  
          return "TimeSeriesViewer"  
   
       def update(self,times,values):  
           for i in range(times.shape[0]): print "[%s: %s]"%(times[i],values[i])  
   
 class TimeSeriesWriter(TimeSeriesFilter):  
       def __init__(self,time_series,ostream,seperator=","):  
          TimeSeriesFilter.__init__(self,0)  
          time_series.checkInUpdate(self)  
          self.setDebug(time_series.debug())  
          self.__ostream=ostream  
681           self.__seperator=seperator           self.__seperator=seperator
682             self.__commend_tag=commend_tag
683             self.__time_series={}
684             self.__t={}
685             self.__v={}
686             # set up the time series:
687             for i in self.__list_of_istreams:
688               line=self.__commend_tag
689               while  not line=="" and line[0]==self.__commend_tag:
690                   line=i.readline().strip()
691               if line=="":
692                  list_of_istreams.remove(i)
693               else:
694                  d=line.split(self.__seperator)
695                  self.__t[i]=float(d[0])
696                  tmp=[]
697                  for j in d[1:]: tmp.append(float(j))
698                  self.__v[i]=numarray.array(tmp)
699                  self.__time_series[i]=DataCatcher(self.__cntrl,len(d)-1,str(i))
700    
701             #
702          def run(self):
703             while len(self.__list_of_istreams)>0:
704                if len(self.__time_series)>0:
705                   # find list all times with minumum time node:
706                   tminargs=[]
707                   for i in self.__time_series:
708                       if len(tminargs)==0:
709                           tminargs.append(i)
710                       elif abs(t[tminargs[0]]-self.__t[i])<1.e-8*abs(self.__t[i]):
711                           tminargs.append(i)
712                       elif self.__t[i]<t[tminargs[0]]:
713                           tminargs=[i]
714                   # find list all times with minumum time node:
715                   self.__cntrl.nextTime(self.__t[tminargs[0]])
716                   for i in tminargs:
717                       self.__time_series[i].nextValue(self.__v[i])
718                       # find next line without leading "#"
719                       line="#"
720                       while not line=="" and line[0]==self.__commend_tag:
721                           line=i.readline().strip()
722                       # if eof reached iostream is removed for searching
723                       if line=="":
724                          self.__list_of_istreams.remove(i)
725                       else:
726                          d=line.split(self.__seperator)
727                          self.__t[i]=float(d[0])
728                          tmp=[]
729                          for j in d[1:]: tmp.append(float(j))
730                          self.__v[i]=numarray.array(tmp)
731    
      def getControler(self):
         """returns the controler shared by all time series created through the input streams"""
         return self.__cntrl
735    
      def getTimeSeries(self,istream=None):
         """returns the time series created from the input streams.

         If istream is given, the single series belonging to that stream is
         returned. Otherwise all series are returned as a tuple; a single
         series is returned unwrapped and None is returned if there are none."""
         if istream==None:
            out=self.__time_series.values()
            if len(out)>1:
               return tuple(out)
            elif len(out)>0:
               return out[0]
            else:
               return None
         else:
            return self.__time_series[istream]
748    
749    
class Plotter(TimeSeriesOperator):
    """renders one or several time series as a gnuplot line plot (via the
    third-party pyvisi gnuplot renderer), showing a sliding window of the
    last window_size data and saving the picture to file_name on each update."""
    def __init__(self,time_series,window_size=DEFAULT_BUFFER_SIZE/4,file_name=None,format=None):
         if isinstance(time_series,list):
             # several series: all must share the controler of the first one
             # (assumed here, not checked -- TODO confirm)
             dbg=time_series[0].getControler().debug()
             text=""
             for i in time_series:
               if len(text)==0:
                  text=str(i)
               else:
                  text=text+","+str(i)
             TimeSeriesOperator.__init__(self,time_series[0].getControler(),time_series,window_size,0,dbg,"plot(%s)"%text)
         else:
             text=str(time_series)
             dbg=time_series.getControler().debug()
             TimeSeriesOperator.__init__(self,time_series.getControler(),[time_series],window_size,0,dbg,"plot(%s)"%text)
         # imported locally to avoid a hard dependency on pyvisi at module load:
         from pyvisi.renderers.gnuplot import LinePlot,Scene,PsImage
         self.__renderer=Scene()
         self.__line_plot=LinePlot(self.__renderer)
         self.__line_plot.setTitle(text)
         self.__line_plot.setLineStyle("lines")
         self.__line_plot.setXLabel("time")
         self.__line_plot.setYLabel("values")
         self.__file_name=file_name
         if format==None:
             # default output format is PostScript
             self.__format=PsImage()
         else:
             self.__format=format
         self.__window_size=window_size

    def update(self,start,end):
         """redraws the plot over the last window_size data (clipped to the
         first available datum) and saves it to file_name"""
         s=max(end-self.__window_size,self.getControler().getIdOfFirstAvailableDatum())
         args=[self.getControler()[s:end]]
         for arg in self.getArguments(): args.append(arg[s:end])
         self.__line_plot.setData(*args)
         self.__line_plot.render()
         if self.__file_name==None:
             # NOTE(review): message kept byte-identical ("avilabel" typo is in
             # the original); only saving to a file is supported.
             raise SystemError,"Online viewing is not avilabel yet!"
         else:
             self.__renderer.save(fname=self.__file_name, format=self.__format)
789                
790    
def viewer(time_serie,seperator=","):
      """returns a Writer that prints the given time series to standard output,
      with seperator between the columns"""
      from sys import stdout
      return Writer(time_serie,stdout,seperator)
795    
796        def __str__(self):  def differential(time_serie):
797           return "TimeSeriesWriter"        """calculates the derivative Dv of the time series v:
798            
799                Dv[n]=(v[n]-v[n-1])/(t[n]-t[n-1])
800    
801        def update(self,times,values):        """
802          for i in range(times.shape[0]): self.__ostream.writelines("%s,%s\n"%(times[i],values[i]))        out=(((time_serie<<1)-time_serie)/((time_serie.getControler()<<1)-time_serie.getControler())+ \
803               ((time_serie>>1)-time_serie)/((time_serie.getControler()>>1)-time_serie.getControler()))/2.
804          out.setDescription("d(%s)/dt"%str(time_serie))
805          out.setDebug(time_serie.debug())
806          return out
807    
def integral(time_serie):
      """calculates the integral Iv of the time series v by the trapezoidal rule:

            Iv[n]=int_{t_0}^{t_n} v ~ sum_{0<i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])

      """
      t=time_serie.getControler()
      # trapezoid area between consecutive time nodes, then accumulated:
      trapezoids=((time_serie>>1)+time_serie)/2.*(t-(t>>1))
      result=TimeSeriesCumulativeSum(trapezoids)
      result.setDescription("I (%s) dt"%str(time_serie))
      result.setDebug(time_serie.debug())
      return result
818    
def smooth(time_serie,range=5):
     """smoothes a time series by averaging, at each time node, over the
     previous and next range values (a moving average built from the integral)"""
     # NOTE(review): the parameter name shadows the builtin range; kept for
     # backward compatibility of keyword calls.
     anti_derivative=integral(time_serie)
     t=time_serie.getControler()
     out=((anti_derivative>>range)-(anti_derivative<<range))/((t>>range)-(t<<range))
     out.setDescription("smooth(%s,-%d:%d) dt"%(str(time_serie),range,range))
     out.setDebug(time_serie.debug())
     return out
826    
def leakySmooth(time_serie,l=0.99):
     """leaky smoother: s(t)=int_{t_0}^{t} v(r) l^{t-r} dr/ int_{t_0}^{t} l^{t-r} dr

     l is the leak factor (0<l<1); values further in the past are weighted down
     geometrically."""
     w=l**(-time_serie.getControler())
     # BUG FIX: the original called the undefined name integrate; the module's
     # integration helper is integral.
     out=integral(time_serie*w)/integral(w)
     out.setDescription("leaky smoother(%s)"%str(time_serie))
     # propagate the debug flag, consistent with differential/integral/smooth:
     out.setDebug(time_serie.debug())
     return out
833    
834  # test  # test
835    
836  if __name__=="__main__":  if __name__=="__main__":
837       # tests the interfaces to data sets:
838       print "Test of Datasets:"
839       print "================="
840       bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBaseBuffer")
841       bfv_l=TimeSeriesBaseDataset(bf,offset=1,debug=True,description="offset 1")
842       bfv_r=TimeSeriesBaseDataset(bf,offset=-1,debug=True,description="offset -1")
843       bf.append([1.,2.,3.,4.])
844       print "should be all 2. :",bfv_l[0]
845       print bf[1]
846       print bfv_r[2]
847       bf.append([5.,6.,7.])
848       print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
849       print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
850       print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
851       print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]
852    
853       print "Test of Controler"
854       print "================="
855       b=Controler(buffer_size=15,debug=True)
856       s3=b>>3
857       s1=b>>1
858       s_3=b<<3
859       print s_3
860       print b
861       print b+s3
862       sum=(s_3+b)+(b+s3)
863      
864       for i in range(30):
865           b.nextTime(i*1.)
866       b.flush()
867       print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
868       print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
869       print "should be all 96. :",sum.getDataset()[24]
870      
871       print "Test of operators"
872       print "================="
873       b=Controler(buffer_size=15,debug=True)
874       b.setFlushRate(2)
875       q=DataCatcher(b)
876       b1=b<<1
877       a=b+b1
878       a_s=b1+1.
879       s_a=1.+b1
880       d=b-b1
881       d_s=b1-1.
882       s_d=1.-b1
883       m=b*b1
884       m_s=b1*2.
885       s_m=2.*b1
886       dv=b/b1
887       dv_s=b1/2.
888       s_dv=2./b1
889       p=b**b1
890       p_s=b1**2.
891       s_p=2.**b1
892       pb=+b
893       mb=-b
894       sum=TimeSeriesCumulativeSum(b)
895       diff=differential(b)
896       smt=smooth(b,2)
897       int=integral(b*2)
898       fl=file("/tmp/test.csv","w")
899       w=Writer(q,fl)
900       v=viewer(q)
901       plo=Plotter([a,a_s],window_size=4,file_name="s.ps")
902       for i in range(30):
903           b.nextTime(i*1.)
904           if i%2==1: q.nextValue(i*28.)
905       b.flush()
906       print "a[28] should be %e: %e"%(28.+29.,a[28])
907       print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
908       print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
909       print "d[28] should be %e: %e"%(28.-29.,d[28])
910       print "d_s[28] should %e: %e"%(29.-1.,d_s[28])
911       print "s_d[28] should %e: %e"%(1.-29.,s_d[28])
912       print "m[28] should be %e: %e"%(28.*29.,m[28])
913       print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
914       print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
915       print "dv[28] should be %e: %e"%(28./29.,dv[28])
916       print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
917       print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
918       print "p[28] should be %e: %e"%(28.**29.,p[28])
919       print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
920       print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
921       print "pb[28] should be %e: %e"%(28.,pb[28])
922       print "mb[28] should be %e: %e"%(-28.,mb[28])
923       print "sum[28] should be %e: %e"%(28*29./2,sum[28])
924       print "diff[28] should be %e: %e"%(1.,diff[28])
925       print "smt[27] should be %e: %e"%(27.,smt[27])
926       print "int[28] should be %e: %e"%(28.**2,int[28])
927       print "q[27] should be %e: %e"%(27*28.,q[27])
928       print "q[28] should be %e: %e"%(28*28.,q[28])
929       print "q[29] should be %e: %e"%(29*28.,q[29])
930       fl.flush()
931      
932       rin=Reader(file("/tmp/test.csv","r+"),buffer_size=15,debug=True)
933       rin.run()
934       inp=rin.getTimeSeries()
935       print "inp[27] should be %e: %e"%(27*28.,inp[27])
936       print "inp[28] should be %e: %e"%(28*28.,inp[28])
937       print "inp[29] should be %e: %e"%(29*28.,inp[29])
938    
    c=TimeSeriesCollector()  
    c.setDebugOn()  
    ii=TimeSeriesIntegrator(c)  
    d=TimeSeriesDifferential(c)  
    v=TimeSeriesViewer(ii)  
    w=TimeSeriesWriter(d,file("test.csv","w"))  
   
    for i in range(15):  
       c.add(i*1.,i+1.)  

Legend:
Removed from v.110  
changed lines
  Added in v.614

  ViewVC Help
Powered by ViewVC 1.1.26