
Diff of /trunk/esys2/escript/py_src/timeseries.py


revision 110 by jgs, Mon Feb 14 04:14:42 2005 UTC  ->  revision 117 by jgs, Fri Apr 1 05:48:57 2005 UTC
# Line 1  Line 1 
1  # $Id$  # $Id$
2    
3  import numarray  import numarray
4    from types import SliceType
5    DEFAULT_BUFFER_SIZE=9
6    DEFAULT_FLOAT_TYPE=numarray.Float64
7    
8  class TimeSeriesBase:  class TimeSeriesBase:
9     """The TimeSeriesBase class is the base class for all classes of the TimeSeries module.     """The TimeSeriesBase class is the base class for all classes of the TimeSeries module."""
       It takes care of updating dependent TimeSeriesBase objects and the debugging mechanism."""  
10    
11     def __init__(self):     def __init__(self,debug=False,description="timeseries.Base"):
12         self.__debug=False         self.__debug=debug
13           self.__description=description
14    
15     def __str__(self):     def __str__(self):
16         return "TimeSeriesBase"         return self.__description
17    
18     def setDebugOn(self):     def setDebugOn(self):
19        """switch on debugging mode"""        """switch on debugging mode"""
# Line 30  class TimeSeriesBase: Line 33  class TimeSeriesBase:
33     def debug(self):     def debug(self):
34        """returns true if debug mode is on"""        """returns true if debug mode is on"""
35        return self.__debug        return self.__debug
         
 class TimeSeriesFilter(TimeSeriesBase):  
    """TimeSeriesFilter objects are applied to TimeSeries objects to filer out information or to convert it.  
       A TimeSeriesFilter objects is called by the TimeSeries object it is depending on to consider the values currently in the buffer for  
       updating. Some TimeSeriesFilter may require values outside the buffer. The TimeSeries object maintains the last buffer_overlap values  
       in the buffer so they can be used to process (not neccesarily all) value in the buffer."""  
36    
37     def __init__(self,buffer_overlap=0):  #============================================================================================================
38         self.__left_required_extension=buffer_overlap  class TimeSeriesDataset(TimeSeriesBase):
39       """provides an interface for accessing a set of linearly ordered data."""
40       def __init__(self,buffer,offset=0,debug=False,description="timeseries.Dataset"):
41           TimeSeriesBase.__init__(self,debug,description)
42           self.__buffer=buffer
43           self.__offset=offset
44           if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())
45    
46       def __len__(self):
47           """needed to handle negative indexing in slicing"""
48           return 0
49    
50     def __str__(self):     def getNumComponents(self):
51         return "TimeSeriesFilter"         """returns the number of components of the data (may be overwritten by subclass)"""
52           return self.getBuffer().getNumComponents()
53    
54     def getBufferOverlapNeededForUpdate(self):     def getIdOfLastDatum(self):
55         return self.__left_required_extension        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
56          return self.getBuffer().getIdOfLastDatum()-self.getOffset()
57    
58       def getIdOfFirstDatum(self):
59          """returns the identification number of the first datum (may be overwritten by subclass)"""
60          return self.getBuffer().getIdOfFirstDatum()-self.getOffset()
61    
62       def getOffsetInBuffer(self):
63          """returns the offset to access elements in getBuffer() (may be overwritten by subclass)"""
64          return  self.getOffset()
65    
66       def getIdOfLastUnusedDatum(self):
67           """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesDataset (may be overwritten by subclass)"""
68           return self.getBuffer().getIdOfLastUnusedDatum()-self.getOffset()
69    
70       def updateIdOfLastUnusedDatum(self,last_unused_datum):
71           """updates the identification number of the last unused datum (to be overwritten by subclass)"""
72           self.getBuffer().updateIdOfLastUnusedDatum(last_unused_datum+self.getOffset())
73    
74       def append(self,values):
75           """appends data to the buffer. If the buffer would overflow it is rearranged before the data are appended  (to be overwritten by subclass)"""
76           self.getBuffer().append(values)
77    
78     def update(self,times,values):     def getBufferSize(self):
79         pass         """returns the size of the buffer (to be overwritten by subclass)"""
80           return self.getBuffer().getBufferSize()
81      
82       def needsRearrangement(self,num_new_data=0):
83           """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
84           return self.getBuffer().needsRearrangement(num_new_data)
85    
86       def isEmpty(self):
87        """returns true if no data have been appended to the buffer"""
88          return self.getNumData()<=0
89      
90       def getNumData(self):
91          """returns the number of data (not all of them are accessible)"""
92          return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1
93    
94       def getBuffer(self):
95          """return the buffer referenced by the TimeSeriesDataset"""
96          return self.__buffer
97    
98       def getOffset(self):
99          """return the offset when referring to dataset elements"""
100          return self.__offset
101    
102       def __getitem__(self,index):
103        """returns the datum (or slice of data) at the given index"""
104          if type(index)==SliceType:
105             start=index.start
106             end=index.stop
107             if start==end:
108                return self[start]
109             else:
110                 if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
111                     end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
112                 return self.getBuffer()[start+self.getOffsetInBuffer():end+self.getOffsetInBuffer()]
113          else:
114             if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
115             return self.getBuffer()[index+self.getOffsetInBuffer()]
116    
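(An aside on TimeSeriesDataset, not part of either revision: a dataset constructed with a non-zero offset is just a re-indexed view onto the wrapped data, index i of the view resolving to index i+offset of the underlying buffer. The sketch below condenses the dataset test at the end of this file; buffer size and values are purely illustrative.)

   # illustrative sketch -- not part of revision 117
   bf=TimeSeriesBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBuffer")
   bfv_l=TimeSeriesDataset(bf,offset=1,debug=True,description="offset 1")
   bf.append([1.,2.,3.,4.])
   print bfv_l[0],bf[1]      # both print 2. : index 0 of the view maps to index 1 of the buffer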
117  _DEFAULT_CACHE_SIZE=9  class TimeSeriesBuffer(TimeSeriesDataset):
118  _DEFAULT_BUFFER_SIZE=5     """An implementation of TimeSeriesDataset which actually stores data in a numarray buffer"""
119  _FLOATING_TYPE=numarray.Float64     def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="timeseries.Buffer"):
   
 class TimeSeries(TimeSeriesBase):  
    def __init__(self,buffer_overlap=0,buffer_size=_DEFAULT_BUFFER_SIZE,cache_size=_DEFAULT_CACHE_SIZE,numComponents=1):  
        if buffer_size>cache_size: raise ValueError,"buffer size has to be less or equal cache size"  
        TimeSeriesBase.__init__(self)  
        self.__updates=list()  
        self.__max_buffer_overlap=0  
        self.__buffer_overlap=0  
        self.__numNodes=0  
        self.__numNodesInBuffer=0  
        self.__numNodesInCache=0  
        self.__firstNodeInBuffer=0  
        self.__firstNodeInCache=0  
        self.__buffer_size=buffer_size  
        self.__node_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)  
        self.__attachment_cache=[]  
120         if numComponents<2:         if numComponents<2:
121            self.__value_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,),type)
122         else:         else:
123            self.__value_cache=numarray.zeros((cache_size,numComponents),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,numComponents),type)
124         self.resizeMaxBufferOverlap(buffer_overlap)         TimeSeriesDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
125           self.__num_data_in_buffer=0
126           self.__id_last_unused_datum=id_of_first_datum-1
127           self.__id_last_datum=id_of_first_datum-1
128           self.__id_first_datum=id_of_first_datum
129           if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
130                           (self,self.getBufferSize(),self.getNumComponents(),id_of_first_datum)
131    
    def __del__(self):  
        self.flush()  
   
    def __str__(self):  
        return "TimeSeries"  
132    
133       def getBufferSize(self):
134           """returns the size of the buffer"""
135           return self.getBuffer().shape[0]
136      
137     def getNumComponents(self):     def getNumComponents(self):
138         if self.__value_cache.rank==1:         """returns the number of components of the data (overwrites TimeSeriesDataset method)"""
139           if self.getBuffer().rank==1:
140            return 1            return 1
141         else:         else:
142            return self.__value_cache.shape[1]            return self.getBuffer().shape[1]
143    
144     def getNumNodes(self):     def getNumDataInBuffer(self):
145         """returns the number of time nodes in the time series"""         """returns the number of data currently in the buffer"""
146         return self.__numNodes         return self.__num_data_in_buffer
147    
148     def getCacheSize(self):     def getIdOfLastDatum(self):
149         """returns the cache size"""        """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesDataset)"""
150         return self.__node_cache.shape[0]        return self.__id_last_datum
151    
152       def getIdOfFirstDatum(self):
153          """returns the identification number of the first datum (overwrites method from TimeSeriesDataset)"""
154          return self.__id_first_datum
155    
156       def getOffsetInBuffer(self):
157          """returns the offset to access elements in the buffer (overwrites method from TimeSeriesDataset)"""  
158          return -self.getIdOfLastDatum()+self.getNumDataInBuffer()-1  
159    
160       def getIdOfLastUnusedDatum(self):
161           """returns the identification number of the last datum which has been unused by all TimeSeries referring to the TimeSeriesDataset (overwrites method from TimeSeriesDataset)"""
162           return self.__id_last_unused_datum
163    
164       def updateIdOfLastUnusedDatum(self,last_unused_datum):
165           """updates the identification number of the last unused datum (to be overwritten by subclass)"""
166           self.getBuffer().updateIdOfLastUnusedDatum(last_unused_datum-self.getOffset())
167    
168       def updateIdOfLastUnusedDatum(self,last_unused_datum):
169           """updates the identification number of the last unused datum (overwrites TimeSeriesDataset method)"""
170           if self.__id_last_unused_datum>last_unused_datum:
171               self.__id_last_unused_datum=last_unused_datum
172               if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unused_datum)
173    
174       def needsRearrangement(self,num_new_data=0):
175           """returns True if the buffer will be full after num_new_data have been appended"""
176           return self.getNumDataInBuffer()+num_new_data>self.getBufferSize()
177            
178       def append(self,data):
179        """appends data to the buffer. If the buffer would overflow it is rearranged before the data are appended (overwrites TimeSeriesDataset method)"""
180          data=numarray.array(data)
181          nc=self.getNumComponents()
182          if data.rank==0:
183            if nc==1:
184               num_new_data=1
185            else:
186               raise ValueError,"%s: illegal data shape"%self
187          elif data.rank==1:
188            if nc==1:
189                 num_new_data=data.shape[0]
190            else:
191                 num_new_data=1  
192          elif data.rank==2:
193            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
194            num_new_data=data.shape[0]
195          else:
196             raise ValueError,"%s: illegal rank"%self
197    
198     def getBufferSize(self):        # check if the buffer would overflow when the data are appended:
199         """returns the buffer size"""        if self.needsRearrangement(num_new_data):
200         return self.__buffer_size          nn=self.getNumDataInBuffer()
201            num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnusedDatum()
202            if num_protected_data+num_new_data>self.getBufferSize():
203                  raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
204            if num_protected_data>0: self.getBuffer()[0:num_protected_data]=self.getBuffer()[nn-num_protected_data:nn]
205            self.__num_data_in_buffer=num_protected_data
206            self.__id_last_unused_datum=self.__id_last_datum
207            if self.debug():
208                 print "Debug: %s: rearrangement: first data in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBuffer()+1)
209          # copy data over:
210          nn=self.getNumDataInBuffer()
211          self.getBuffer()[nn:nn+num_new_data]=data
212          self.__num_data_in_buffer+=num_new_data
213          self.__id_last_datum+=num_new_data
214          self.__id_last_unused_datum+=num_new_data
215          if self.debug(): print "Debug: %s: %d data appended. Last unused datum is now %d."%(self,num_new_data,self.__id_last_unused_datum)
216    
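(A note on the rearrangement above, not part of either revision: when an append would overflow the buffer, only the data that are still protected, i.e. newer than the last unused datum, are copied to the front; older data become inaccessible although their identification numbers keep counting on. A minimal sketch, with size and values chosen for illustration:)

   # illustrative sketch -- not part of revision 117
   bf=TimeSeriesBuffer(buffer_size=5,numComponents=1,debug=True,description="sketch")
   bf.append([1.,2.,3.,4.])      # data with ids 0..3 fit without rearrangement
   bf.append([5.,6.,7.])         # would overflow, so the buffer is rearranged first
   print bf[4],bf[5],bf[6]       # prints 5. 6. 7. -- the new data keep their global ids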
217    # ======================================
218    class TimeSeries(TimeSeriesDataset):
219        """a TimeSeries glues a Controler and a TimeSeriesDataset together. It also provides a TimeSeriesDataset view to the dataset"""
220          def __init__(self,dataset,debug=False,description="timeseries."):
221               TimeSeriesDataset.__init__(self,dataset,0,debug,description)
222               self.__id_last_processed_datum=dataset.getIdOfFirstDatum()-1        
223          
224          def getDataset(self):
225              """returns the TimeSeriesDataset of the time series"""
226              return self.getBuffer()
227    
228          def getControler(self):
229              """returns the Controler of the time series (to be overwritten by subclass)"""
230              pass
231    
232          def getIdOfLastProcessedDatum(self):
233              return self.__id_last_processed_datum
234    
235          def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
236              self.__id_last_processed_datum=id_last_processed_datum
237    
238          def __add__(self,arg):
239             if isinstance(arg,TimeSeriesDataset):
240                return TimeSeriesSum(self,arg)
241             else:
242                return TimeSeriesAddScalar(self,arg)
243    
244          def __sub__(self,arg):
245             return self+(-1.)*arg
246    
247          def __mul__(self,arg):
248             if isinstance(arg,TimeSeriesDataset):
249                return TimeSeriesMult(self,arg)
250             else:
251                return TimeSeriesMultScalar(self,arg)
252    
253          def __div__(self,arg):
254             if isinstance(arg,TimeSeriesDataset):
255                return TimeSeriesDiv(self,arg)
256             else:
257                return TimeSeriesMultScalar(self,1./arg)
258    
259          def __pow__(self,arg):
260             if isinstance(arg,TimeSeriesDataset):
261                return TimeSeriesPower(self,arg)
262             else:
263                return TimeSeriesPowerScalar(self,arg)
264          
265          def __radd__(self,arg):
266             return self.__add__(arg)
267    
268     def getNumNodesInCache(self):        def __rsub__(self,arg):
269         """returns the number of nodes in cache"""           return arg+(-1.)*self
        return self.__numNodesInCache  
   
    def getNumNodesInBuffer(self):  
        """returns the number of nodes in cache"""  
        return self.__numNodesInBuffer  
       
    def getFirstNodeInCache(self):  
        """returns the id number of the first node in the cache"""  
        return self.__firstNodeInCache  
   
    def getFirstNodeInBuffer(self):  
        """returns the id number of the first node in the buffer"""  
        return self.__firstNodeInBuffer  
   
    def getFirstNodeOfBufferInCache(self):  
        """returns the first location of the first node in the buffer relative to the cache"""  
        return self.getFirstNodeInBuffer()-self.getFirstNodeInCache()  
   
    def getBufferOverlap(self):  
        """returns the current size of the left extension"""  
        return self.__buffer_overlap  
   
    def getMaxBufferOverlap(self):  
        """returns the maximum size of the left extension"""  
        return self.__max_buffer_overlap  
   
    def resizeMaxBufferOverlap(self,new_buffer_overlap=0):  
        if new_buffer_overlap>self.__max_buffer_overlap:  
           if self.getNumNodes()>0: raise ValueError,"left extension can only be resized for empty time series"  
           if self.getCacheSize()<self.getBufferSize()+new_buffer_overlap:  
                raise ValueError,"Cache size is too small! required cache size is %s"%self.getBufferSize()+new_buffer_overlap  
           self.__max_buffer_overlap=new_buffer_overlap  
           if self.debug(): print "Debug: %s: left extension is increased to %d"%(self,new_buffer_overlap)  
   
    def getLastNode(self):  
        if self.getNumNodesInCache()>0:  
           return self.__node_cache[self.getNumNodesInCache()-1]  
        else:  
           return -1.e300  
270    
271     def getLastValue(self):        def __rmul__(self,arg):
272         if self.getNumNodesInCache()>0:           return self.__mul__(arg)
273            return self.__value_cache[self.getNumNodesInCache()-1]  
274         else:        def __rdiv__(self,arg):
275            raise ValueError,"No value available"           if isinstance(arg,TimeSeriesDataset):
276                return TimeSeriesDiv(arg,self)
277             else:
278                return TimeSeriesDivScalar(self,arg)
279    
280          def __rpow__(self,arg):
281             if isinstance(arg,TimeSeriesDataset):
282                return TimeSeriesPower(arg,self)
283             else:
284                return Exp(numarray.log(arg)*self)
285    
286          def __lshift__(self,arg):
287             return TimeSeriesShift(self,-arg)
288    
289          def __rshift__(self,arg):
290             return TimeSeriesShift(self,arg)
291    
292          def __neg__(self):
293             return (-1.0)*self
294    
295          def __pos__(self):
296             return (1.0)*self
297    
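(A remark on the operators above, not part of either revision: they do not compute anything immediately but construct new TimeSeriesFilter objects that register themselves with the controler and are filled in when the controler flushes. The sketch below mirrors the operator test at the end of this file:)

   # illustrative sketch -- not part of revision 117
   b=Controler(buffer_size=15,debug=True)
   b1=b<<1                       # shifted view of the time nodes
   a=b+b1                        # a TimeSeriesSum, appended to b's update list
   for i in range(30):
       b.newTimeNode(i*1.)       # dependent filters are flushed as the buffer fills
   b.flush()
   print a[28]                   # prints 57. (=28.+29.), as in the test below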
298    class TimeSeriesFilter(TimeSeries):
299          """a TimeSeriesFilter is a TimeSeriesDataset attached to a Controler where the TimeSeriesDataset provides data
300            at the time nodes defined by the Controler. In addition to a TimeSeries, a TimeSeriesFilter allows updating
301            the underlying TimeSeriesDataset through the update method, which is overwritten by a particular implementation of the
302            class. The update method is called by the attached Controler to append the data [start:end] to the attached dataset"""
303          def __init__(self,controler,dataset,args=[],left_wing_size=0,right_wing_size=0,debug=False,description="timeseries.Filter"):
304              TimeSeries.__init__(self,dataset,debug,description)
305              self.__left_wing_size=left_wing_size
306              self.__right_wing_size=right_wing_size
307              self.__args=args
308              self.__controler=controler
309              controler.appendFilterToUpdateList(self)
310    
311          def getControler(self):
312              """returns the Controler of the time series (overwrites method of by TimeSeries)"""
313              return self.__controler
314    
315          def update(self,start,end):
316              """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
317              nc=self.getNumComponents()
318              if nc>1:
319              self.getDataset().append(numarray.zeros([end-start,nc]))
320              else:
321                 self.getDataset().append(numarray.zeros(end-start))
322          def getLeftWingSize(self):
323              """returns the left wing size"""  
324              return self.__left_wing_size
325    
326          def getRightWingSize(self):
327              """returns the right wing size"""
328              return self.__right_wing_size
329    
330          def getArguments(self,index=None):
331            """returns the list of arguments or, if index is present, the argument with the given index. In the latter case None is returned if no arguments are present"""
332              if index==None:
333                 return self.__args
334              else:
335                 if len(self.__args)>0:
336                    return self.__args[index]
337                 else:
338                    return None
339    
340     def checkInUpdate(self,time_series_filter):        def getArgumentDataset(self,index):
341         """checks in a time_series_filter object to be updated when buffer is full"""            """returns the dataset of the argument with the given index"""
342         if self.getNumNodes()>0:            arg=self.getArguments(index)
343            raise TypeError,"Check in of TimeSeries requires empty buffer."            if arg==None:
344         self.__updates.append(time_series_filter)               return None
345         self.resizeMaxBufferOverlap(time_series_filter.getBufferOverlapNeededForUpdate())            else:
346         if self.debug(): print "Debug: %s: %s checked in successfully."%(self,time_series_filter)                return self.getArguments(index).getDataset()
347    
348     def append(self,time_nodes,values,attachments=None):        def flush(self):
349         """appends the time_nodes and values into the buffer"""            """calls the update method with the maximum processable range. It also updates the id of the last unused datum for all arguments"""
350         num_additional_nodes=time_nodes.shape[0]            start=self.getIdOfLastProcessedDatum()+1
351         if num_additional_nodes<1: return            end=None
352         if self.debug():            for i in self.getArguments():
353              if num_additional_nodes>1:               if end==None:
354                 print "Debug: %s: values %d to %d are added to time series."%(self,self.getNumNodes(),self.getNumNodes()+num_additional_nodes-1)                  end=i.getIdOfLastDatum()
355              else:               else:
356                 print "Debug: %s: value %d is added to time series."%(self,self.getNumNodes())                  end=min(end,i.getIdOfLastDatum())
357         if not num_additional_nodes==values.shape[0]:            if not end==None:
358            raise ValueError,"Number time nodes and number of values don't match."                if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
359         if self.getLastNode()>=time_nodes[0]:                self.update(start,end-self.getRightWingSize()+1)      
360            raise ValueError,"first time node to be checked in is less than last previously checked in node"                for i in self.getArguments(): i.updateIdOfLastUnusedDatum(end-self.getLeftWingSize())
361                  self.updateIdOfLastProcessedDatum(end)
362         if num_additional_nodes>1:  
363              if min(time_nodes[1:num_additional_nodes]-time_nodes[0:num_additional_nodes-1])<=0:  class Controler(TimeSeries):
364                raise ValueError,"time nodes have to be strictly increasing"     """controls a set of TimeSeries"""
365       def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="timeseries.Controler"):
366            TimeSeries.__init__(self,TimeSeriesBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"Time nodes buffer of "+description),\
367                                                                                       debug,"Time nodes of "+description)
368            self.setFlushRate()  
369            self.__update_time_series=list()
370                
371         # full cache requires a shift:     def __del__(self):
372         if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():         self.flush()
373             new_num_nodes_in_cache=self.getNumNodesInBuffer()+self.getBufferOverlap()  
374             if new_num_nodes_in_cache+num_additional_nodes>self.getCacheSize():     def getControler(self):
375                raise ValueError,"Cache overflow: Expected size is bigger than %d"%(new_num_nodes_in_cache+num_additional_nodes)         """returns the Controler of the time series (overwrites method of by TimeSeries)"""
376             start=self.getNumNodesInCache()-new_num_nodes_in_cache         return self
377             end=start+new_num_nodes_in_cache  
378             self.__node_cache[0:new_num_nodes_in_cache]=self.__node_cache[start:end]     def setFlushRate(self,rate=50):
379             self.__value_cache[0:new_num_nodes_in_cache]=self.__value_cache[start:end]         """set the flush rate, i.e. after rate new time nodes have been checked in the flush method is called."""
380             self.__attachment_cache[0:new_num_nodes_in_cache]=self.__attachment_cache[start:end]         self.__flush_rate=rate
381           if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)
382    
383             self.__firstNodeInCache+=start     def needsFlushing(self):
384             self.__numNodesInCache=new_num_nodes_in_cache        """returns true if the dependent TimeSeriesFilters need to be flushed because the time nodes buffer is full or because of the set flush rate"""
385             if self.debug(): print "Debug: %s: %d values from %d onwards are moved to the beginning of the cache (first node in cache is now %d)."% \        return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0
                                                                                     (self,new_num_nodes_in_cache,start,self.__firstNodeInCache)  
             
        # copy values into cache:  
        if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():  
            raise ValueError,"Cache overflow: Expected size is bigger than %d"%(self.getNumNodesInCache()+num_additional_nodes)  
        if self.debug():  
            if num_additional_nodes>1:  
               print "Debug: %s: values %d to %d of cache are updated"%(self,self.getNumNodesInCache(),self.getNumNodesInCache()+num_additional_nodes-1)  
            else:  
               print "Debug: %s: value %d of cache is updated."%(self,self.getNumNodesInCache())  
        self.__node_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=time_nodes  
        self.__value_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=values  
        self.__numNodes+=num_additional_nodes  
        self.__numNodesInBuffer+=num_additional_nodes  
        self.__numNodesInCache+=num_additional_nodes  
        print self.__node_cache  
        print self.__value_cache  
        # copy values into cache:  
        if self.getNumNodesInBuffer()>=self.getBufferSize():  
               if self.debug() and len(self.__updates)>0: print "Debug: %s: buffer is full. Updating process is started"%self  
               self.processBuffer()  
386    
387     def flush(self):     def flush(self):
388        self.processBuffer()         """flushes all dependent TimeSeriesFilters by calling their flush method"""
389           if self.debug(): print "Debug: %s: start flushing"%self
390           for time_serie in self.__update_time_series: time_serie.flush()
391    
392       def appendFilterToUpdateList(self,time_serie):
393         if not time_serie.getControler()==self: raise ValueError,"%s: time series %s is not defined on %s."%(self,time_serie,self)
394         if not self.isEmpty(): raise ValueError,"%s: you can only check in a time series while the controler is empty."%self
395           self.__update_time_series.append(time_serie)
396           if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)
397    
398       def newTimeNode(self,value):
399           if self.needsFlushing(): self.flush()
400           self.getDataset().append(value)
401           if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)
402    
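(A usage note for the Controler above, not part of either revision: newTimeNode flushes automatically, either when the next node would overflow the time-node buffer or after every flush-rate new nodes, 50 by default. A brief sketch with an illustrative rate of 10:)

   # illustrative sketch -- not part of revision 117
   b=Controler(buffer_size=15,debug=True)
   b.setFlushRate(10)            # dependent filters are now updated at least every 10 nodes
   for i in range(25):
       b.newTimeNode(i*0.5)      # flush() is called inside newTimeNode when needed
   b.flush()                     # push any remaining data through the dependent filters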
403    # ============================================
404    class TimeSeriesShift(TimeSeries):
405        """creates a shift of the time series, i.e. if d[n] is the datum at time t[n], the value at t[n] becomes d[n+shift] on the output"""
406          def __init__(self,time_serie,shift=1):
407              if shift<0:
408                  dsc="(%s)<<%d"%(time_serie,-shift)
409              else:
410                  dsc="(%s)>>%d"%(time_serie,shift)
411              self.__controler=time_serie.getControler()
412              TimeSeries.__init__(self,TimeSeriesDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),\
413                                              time_serie.debug(),dsc)
414          def getControler(self):
415              return self.__controler
416    
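(A remark on TimeSeriesShift, not part of either revision: a shifted series copies no data; it only re-labels the identification numbers of the existing dataset through the offset mechanism of TimeSeriesDataset. A small sketch; the controler test below exercises the same behaviour:)

   # illustrative sketch -- not part of revision 117
   b=Controler(buffer_size=15,debug=True)
   s3=b>>3                       # datum with id n of b appears under id n+3 in s3
   for i in range(10):
       b.newTimeNode(i*1.)
   print b.getDataset()[7],s3.getDataset()[10]      # both print 7.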
417    class TimeSeriesSum(TimeSeriesFilter):
418          """adds two TimeSeries"""
419          def __init__(self,time_serie_1,time_serie_2):
420              dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
421              dbg=time_serie_1.debug() or time_serie_2.debug()
422              cntrl=time_serie_1.getControler()
423              if not cntrl==time_serie_2.getControler():
424                      raise ValueError("TimeSeriesSum: %s and %s have different controler."%(time_serie_1,time_serie_2))
425              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
426              TimeSeriesFilter.__init__(self,cntrl, \
427                                  TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
428                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)
429    
430          def update(self,start,end):
431              self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])
432    
433    class TimeSeriesAddScalar(TimeSeriesFilter):
434          """adds a single value to a TimeSeries"""
435          def __init__(self,time_serie,scalar):
436              dsc="(%s)+(%s)"%(time_serie,scalar)
437              dbg=time_serie.debug()
438              cntrl=time_serie.getControler()
439              id_first_datum=time_serie.getIdOfFirstDatum()
440              TimeSeriesFilter.__init__(self,cntrl, \
441                           TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
442                           [time_serie],0,0,dbg,dsc)
443              self.__scalar=scalar
444    
445          def update(self,start,end):
446              self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)
447    
448    class TimeSeriesMult(TimeSeriesFilter):
449          """multiplies two TimeSeries"""
450          def __init__(self,time_serie_1,time_serie_2):
451              dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
452              dbg=time_serie_1.debug() or time_serie_2.debug()
453              cntrl=time_serie_1.getControler()
454              if not cntrl==time_serie_2.getControler():
455                      raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
456              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
457              TimeSeriesFilter.__init__(self,cntrl, \
458                       TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
459                       [time_serie_1,time_serie_2],0,0,dbg,dsc)
460    
461          def update(self,start,end):
462              self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])
463    
464    class TimeSeriesMultScalar(TimeSeriesFilter):
465          """multiplies a TimeSeries with a single value"""
466          def __init__(self,time_serie,scalar):
467              dsc="(%s)*%s"%(time_serie,scalar)
468              dbg=time_serie.debug()
469              cntrl=time_serie.getControler()
470              id_first_datum=time_serie.getIdOfFirstDatum()
471              TimeSeriesFilter.__init__(self,cntrl, \
472                           TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
473                           [time_serie],0,0,dbg,dsc)
474              self.__scalar=scalar
475    
476          def update(self,start,end):
477              self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)
478    
479    class TimeSeriesDiv(TimeSeriesFilter):
480          """divides two TimeSeries"""
481          def __init__(self,time_serie_1,time_serie_2):
482              dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
483              dbg=time_serie_1.debug() or time_serie_2.debug()
484              cntrl=time_serie_1.getControler()
485              if not cntrl==time_serie_2.getControler():
486                      raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
487              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
488              TimeSeriesFilter.__init__(self,cntrl, \
489                         TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
490                         [time_serie_1,time_serie_2],0,0,dbg,dsc)
491    
492          def update(self,start,end):
493              self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])
494    
495    class TimeSeriesDivScalar(TimeSeriesFilter):
496        """divides a scalar by a TimeSeries"""
497          def __init__(self,time_serie,scalar):
498              dsc="(%s)/(%s)"%(scalar,time_serie)
499              dbg=time_serie.debug()
500              cntrl=time_serie.getControler()
501              id_first_datum=time_serie.getIdOfFirstDatum()
502              TimeSeriesFilter.__init__(self,cntrl, \
503                           TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
504                           [time_serie],0,0,dbg,dsc)
505              self.__scalar=scalar
506    
507          def update(self,start,end):
508              self.append(self.__scalar/self.getArgumentDataset(0)[start:end])
509    
510    class TimeSeriesPower(TimeSeriesFilter):
511        """raises one TimeSeries to the power of another TimeSeries"""
512          def __init__(self,time_serie_1,time_serie_2):
513              dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
514              dbg=time_serie_1.debug() or time_serie_2.debug()
515              cntrl=time_serie_1.getControler()
516              if not cntrl==time_serie_2.getControler():
517                      raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
518              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
519              TimeSeriesFilter.__init__(self,cntrl, \
520                    TimeSeriesBuffer(cntrl.getBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
521                    [time_serie_1,time_serie_2],0,0,dbg,dsc)
522    
523          def update(self,start,end):
524              self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])
525    
526    class TimeSeriesPowerScalar(TimeSeriesFilter):
527        """raises a TimeSeries to the power of a scalar"""
528          def __init__(self,time_serie,scalar):
529              dsc="(%s)**(%s)"%(time_serie,scalar)
530              dbg=time_serie.debug()
531              cntrl=time_serie.getControler()
532              id_first_datum=time_serie.getIdOfFirstDatum()
533              TimeSeriesFilter.__init__(self,cntrl, \
534                           TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
535                           [time_serie],0,0,dbg,dsc)
536              self.__scalar=scalar
537    
538          def update(self,start,end):
539              self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)
540    
541    class Exp(TimeSeriesFilter):
542        """exponential of a TimeSeries"""
543          def __init__(self,time_serie):
544              dsc="exp(%s)"%(time_serie)
545              dbg=time_serie.debug()
546              cntrl=time_serie.getControler()
547              id_first_datum=time_serie.getIdOfFirstDatum()
548              TimeSeriesFilter.__init__(self,cntrl, \
549                         TimeSeriesBuffer(cntrl.getBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
550                         [time_serie],0,0,dbg,dsc)
551    
552     def processBuffer(self):        def update(self,start,end):
553          if self.getNumNodesInBuffer()>0:            self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))
            for i in self.__updates:  
              if self.debug(): print "Debug: %s: update for %s started"%(self,i)  
              if i.getBufferOverlapNeededForUpdate()>self.getBufferOverlap():  
                 s=self.getFirstNodeOfBufferInCache()  
                 l=self.getNumNodesInBuffer()  
              else:  
                 s=self.getFirstNodeOfBufferInCache()-i.getBufferOverlapNeededForUpdate()  
                 l=self.getNumNodesInBuffer()+i.getBufferOverlapNeededForUpdate()  
              i.update(self.__node_cache[s:s+l],self.__value_cache[s:s+l])  
            self.__firstNodeInBuffer+=self.__numNodesInBuffer  
            self.__numNodesInBuffer=0  
         self.__buffer_overlap=self.getMaxBufferOverlap()  
         if self.debug(): print "Debug: %s: first node in buffer is now %d"%(self,self.__firstNodeInBuffer)  
554    
555      class TimeSeriesCumulativeSum(TimeSeriesFilter):
556        """accumulates the data of the time series, i.e. the value at t[n] becomes the cumulative sum of the data up to and including d[n]"""
557          def __init__(self,time_series):
558             TimeSeriesFilter.__init__(self,1)
559             TimeSeries.__init__(self,frame_size=time_series.getDatasetSize(),buffer_size=time_series.getBufferSize(), \
560                                                                             numComponents=time_series.getNumComponents())
561             self.setDebug(time_series.debug())
562             time_series.checkInUpdate(self)
563             self.__integral=0
564    
565          def __str__(self):
566             return "timeseries.Integrator"
567    
568          def update(self,times,data):
569              l=times.shape[0]
570              self.append(times[1:l],(data[0:l-1]+data[1:l])/2.*(times[1:l]-times[0:l-1]))
571            
572    
573  class TimeSeriesCollector(TimeSeries):  class TimeSeriesCollector(TimeSeries):
574        """TimeSeriesCollector collects values at time nodes"""        """timeseries.Collector collects data at time nodes"""
575        def __init__(self):        def __init__(self):
576           TimeSeries.__init__(self)           TimeSeries.__init__(self)
577    
578        def __str__(self):        def __str__(self):
579           return "TimeSeriesCollector"           return "timeseries.Collector"
580    
581        def add(self,time_mark,value):        def add(self,time_mark,value):
582             """adds the value at time time_mark to the time series"""             """adds the value at time time_mark to the time series"""
# Line 248  class TimeSeriesCollector(TimeSeries): Line 588  class TimeSeriesCollector(TimeSeries):
588             d=l.strip().split(seperator)             d=l.strip().split(seperator)
589             self.add(float(d[0]),float(d[1]))             self.add(float(d[0]),float(d[1]))
590    
591  class TimeSeriesIntegrator(TimeSeries,TimeSeriesFilter):  def Differential(time_series):
592        def __init__(self,time_series):        """calculates the derivative Dv of the time series v:
593           TimeSeriesFilter.__init__(self,1)          
594           TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \              Dv[n]=(v[n]-v[n-1])/(t[n]-t[n-1])
                                                                          numComponents=time_series.getNumComponents())  
          self.setDebug(time_series.debug())  
          time_series.checkInUpdate(self)  
          self.__integral=0  
595    
596        def __str__(self):        """
597           return "TimeSeriesIntegrator"        return (time_series<<1-time_series)/(time_series.getControler()<<1-time_series.getControler())
598    
599        def update(self,times,values):  def Integral(time_series):
600            l=times.shape[0]        """calculates the integral Iv of the time series v using the trapezoidal rule:
           self.append(times[1:l],(values[0:l-1]+values[1:l])/2.*(times[1:l]-times[0:l-1]))  
601                    
602                Iv[n]=sum_{i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])
603    
604  class TimeSeriesDifferential(TimeSeries,TimeSeriesFilter):        """
605        def __init__(self,time_series):        return TimeSeriesCumulativeSum(((time_series<<1)+time_series)/2.*(time_series.getControler()-(time_series.getControler()<<1)),0.)
          TimeSeriesFilter.__init__(self,1)  
          TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \  
                                                                          numComponents=time_series.getNumComponents())  
          self.setDebug(time_series.debug())  
          time_series.checkInUpdate(self)  
   
       def __str__(self):  
          return "TimeSeriesDifferential"  
606    
       def update(self,times,values):  
           l=times.shape[0]  
           self.append((times[0:l-1]+times[1:l])/2,(values[0:l-1]-values[1:l])/(times[0:l-1]-times[1:l]))  
607    
608  class TimeSeriesViewer(TimeSeriesFilter):  class TimeSeriesViewer(TimeSeriesFilter):
609        def __init__(self,time_series):        def __init__(self,time_series):
# Line 286  class TimeSeriesViewer(TimeSeriesFilter) Line 611  class TimeSeriesViewer(TimeSeriesFilter)
611           time_series.checkInUpdate(self)           time_series.checkInUpdate(self)
612    
613        def __str__(self):        def __str__(self):
614           return "TimeSeriesViewer"           return "timeseries.Viewer"
615    
616        def update(self,times,values):        def update(self,times,data):
617            for i in range(times.shape[0]): print "[%s: %s]"%(times[i],values[i])            for i in range(times.shape[0]): print "[%s: %s]"%(times[i],data[i])
618    
619  class TimeSeriesWriter(TimeSeriesFilter):  class TimeSeriesWriter(TimeSeriesFilter):
620        def __init__(self,time_series,ostream,seperator=","):        def __init__(self,time_series,ostream,seperator=","):
# Line 300  class TimeSeriesWriter(TimeSeriesFilter) Line 625  class TimeSeriesWriter(TimeSeriesFilter)
625           self.__seperator=seperator           self.__seperator=seperator
626    
627        def __str__(self):        def __str__(self):
628           return "TimeSeriesWriter"           return "timeseries.Writer"
629    
630        def update(self,times,values):        def update(self,times,data):
631          for i in range(times.shape[0]): self.__ostream.writelines("%s,%s\n"%(times[i],values[i]))          for i in range(times.shape[0]): self.__ostream.writelines("%s,%s\n"%(times[i],data[i]))
632    
633  # test  # test
634    
635  if __name__=="__main__":  if __name__=="__main__":
636       # tests the interfaces to data sets:
637       print "Test of Datasets:"
638       print "================="
639       bf=TimeSeriesBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBuffer")
640       bfv_l=TimeSeriesDataset(bf,offset=1,debug=True,description="offset 1")
641       bfv_r=TimeSeriesDataset(bf,offset=-1,debug=True,description="offset -1")
642       bf.append([1.,2.,3.,4.])
643       print "should be all 2. :",bfv_l[0]
644       print bf[1]
645       print bfv_r[2]
646       bf.append([5.,6.,7.])
647       print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
648       print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
649       print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
650       print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]
651    
652       print "Test of Controler"
653       print "================="
654       b=Controler(buffer_size=15,debug=True)
655       s3=b>>3
656       s1=b>>1
657       s_3=b<<3
658       sum=(s_3+b)+(b+s3)
659      
660       for i in range(30):
661           b.newTimeNode(i*1.)
662       b.flush()
663       print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
664       print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
665       print "should be all 96. :",sum.getDataset()[24]
666      
667       print "Test of operators"
668       print "================="
669       b=Controler(buffer_size=15,debug=True)
670       b1=b<<1
671       a=b+b1
672       a_s=b1+1.
673       s_a=1.+b1
674       d=b-b1
675       d_s=b1-1.
676       s_d=1.-b1
677       m=b*b1
678       m_s=b1*2.
679       s_m=2.*b1
680       dv=b/b1
681       dv_s=b1/2.
682       s_dv=2./b1
683       p=b**b1
684       p_s=b1**2.
685       s_p=2.**b1
686       pb=+b
687       mb=-b
688       for i in range(30):
689           b.newTimeNode(i*1.)
690       b.flush()
691       print "a[28] should be %e: %e"%(28.+29.,a[28])
692       print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
693       print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
694       print "d[28] should be %e: %e"%(28.-29.,d[28])
695       print "d_s[28] should %e: %e"%(29.-1.,d_s[28])
696       print "s_d[28] should %e: %e"%(1.-29.,s_d[28])
697       print "m[28] should be %e: %e"%(28.*29.,m[28])
698       print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
699       print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
700       print "dv[28] should be %e: %e"%(28./29.,dv[28])
701       print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
702       print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
703       print "p[28] should be %e: %e"%(28.**29.,p[28])
704       print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
705       print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
706       print "pb[28] should be %e: %e"%(28.,pb[28])
707       print "mb[28] should be %e: %e"%(-28.,mb[28])
708    
709     c=TimeSeriesCollector()     1/0
710       c=TimeSeriesCollector(b)
711     c.setDebugOn()     c.setDebugOn()
712     ii=TimeSeriesIntegrator(c)     ii=TimeSeriesIntegrator(c)
713     d=TimeSeriesDifferential(c)     d=TimeSeriesDifferential(c)
# Line 317  if __name__=="__main__": Line 715  if __name__=="__main__":
715     w=TimeSeriesWriter(d,file("test.csv","w"))     w=TimeSeriesWriter(d,file("test.csv","w"))
716    
717     for i in range(15):     for i in range(15):
718        c.add(i*1.,i+1.)        b.newTimeNode(i*1.)
719          c.add(i+1.)
720      
