/[escript]/trunk/escript/py_src/timeseries.py

Diff of /trunk/escript/py_src/timeseries.py


trunk/esys2/escript/py_src/timeseries.py revision 110 by jgs, Mon Feb 14 04:14:42 2005 UTC
trunk/escript/py_src/timeseries.py revision 1809 by ksteube, Thu Sep 25 06:43:44 2008 UTC
# Line 1  Line 1 
1  # $Id$  
2    ########################################################
3    #
4    # Copyright (c) 2003-2008 by University of Queensland
5    # Earth Systems Science Computational Center (ESSCC)
6    # http://www.uq.edu.au/esscc
7    #
8    # Primary Business: Queensland, Australia
9    # Licensed under the Open Software License version 3.0
10    # http://www.opensource.org/licenses/osl-3.0.php
11    #
12    ########################################################
13    
14    __copyright__="""Copyright (c) 2003-2008 by University of Queensland
15    Earth Systems Science Computational Center (ESSCC)
16    http://www.uq.edu.au/esscc
17    Primary Business: Queensland, Australia"""
18    __license__="""Licensed under the Open Software License version 3.0
19    http://www.opensource.org/licenses/osl-3.0.php"""
20    __url__="http://www.uq.edu.au/esscc/escript-finley"
21    
22    """
23    Time series analysis
24    
25    @var __author__: name of author
26    @var __copyright__: copyrights
27    @var __license__: licence agreement
28    @var __url__: url entry point on documentation
29    @var __version__: version
30    @var __date__: date of the version
31    """
32    
33    
34    __author__="Lutz Gross, l.gross@uq.edu.au"
35    
36    
37  import numarray  import numarray
38    from types import SliceType
39    DEFAULT_BUFFER_SIZE=1000
40    DEFAULT_FLOAT_TYPE=numarray.Float64
41    
42  class TimeSeriesBase:  class TimeSeriesBase:
43     """The TimeSeriesBase class is the base class for all class of the TimeSeries module.     """The TimeSeriesBase class is the base class for all class of the TimeSeries module."""
       It takes care of the updating depending TimeSeriesBase objects and the debuging mechnism"""  
44    
45     def __init__(self):     def __init__(self,debug=False,description="TimeSeriesBase"):
46         self.__debug=False         self.__debug=debug
47           self.setDescription(description)
48    
49     def __str__(self):     def __str__(self):
50         return "TimeSeriesBase"         return self.__description
51      
52       def setDescription(self,text):
53           self.__description=text
54    
55     def setDebugOn(self):     def setDebugOn(self):
56        """switch on degugging mode"""        """switch on degugging mode"""
# Line 30  class TimeSeriesBase: Line 70  class TimeSeriesBase:
70     def debug(self):     def debug(self):
71        """returns true if debug mode is on"""        """returns true if debug mode is on"""
72        return self.__debug        return self.__debug
         
 class TimeSeriesFilter(TimeSeriesBase):  
    """TimeSeriesFilter objects are applied to TimeSeries objects to filer out information or to convert it.  
       A TimeSeriesFilter objects is called by the TimeSeries object it is depending on to consider the values currently in the buffer for  
       updating. Some TimeSeriesFilter may require values outside the buffer. The TimeSeries object maintains the last buffer_overlap values  
       in the buffer so they can be used to process (not neccesarily all) value in the buffer."""  
73    
74     def __init__(self,buffer_overlap=0):  #============================================================================================================
75         self.__left_required_extension=buffer_overlap  class TimeSeriesBaseDataset(TimeSeriesBase):
76       """provides an interface for accessing a set of linearly ordered data."""
77       def __init__(self,buffer,offset=0,debug=False,description="TimeSeriesDataset"):
78           TimeSeriesBase.__init__(self,debug,description)
79           self.__buffer=buffer
80           self.__offset=offset
81           if self.debug(): print "Debug: %s: offset %d to buffer"%(self,self.getOffset())
82    
83       def __len__(self):
84           """needed to handle negative indexing in slicing"""
85           return 0
86    
87     def __str__(self):     def getNumComponents(self):
88         return "TimeSeriesFilter"         """returns the number of components of the data (may be overwritten by subclass)"""
89           return self.getBaseBuffer().getNumComponents()
    def getBufferOverlapNeededForUpdate(self):  
        return self.__left_required_extension  
90    
91     def update(self,times,values):     def getIdOfLastDatum(self):
92         pass        """returns the identification number of the last datum in the data set (may be overwritten by subclass)"""
93          return self.getBaseBuffer().getIdOfLastDatum()-self.getOffset()
94    
95       def getIdOfFirstDatum(self):
96          """returns the identification number of the first datum (may be overwritten by subclass)"""
97          return self.getBaseBuffer().getIdOfFirstDatum()-self.getOffset()
98    
99       def getIdOfFirstAvailableDatum(self):
100          """returns the identification number of the first available datum (may be overwritten by subclass)"""
101          return self.getBaseBuffer().getIdOfFirstAvailableDatum()-self.getOffset()
102    
103       def getOffsetInBaseBuffer(self):
104          """returns the offset to access elements in getBaseBuffer() (may be overwritten by subclass)"""
105          return  self.getOffset()
106    
107       def getIdOfLastUnreferencedDatum(self):
108           """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (may be overwritten by subclass)"""
109           return self.getBaseBuffer().getIdOfLastUnreferencedDatum()-self.getOffset()
110    
111       def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
112           """updates the identification number of the last unused datum (to be overwritten by subclass)"""
113           self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum+self.getOffset())
114    
115       def append(self,values):
116           """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended  (to be overwritten by subclass)"""
117           self.getBaseBuffer().append(values)
118    
119       def getBaseBufferSize(self):
120           """returns the size of the buffer (to be overwritten by subclass)"""
121           return self.getBaseBuffer().getBaseBufferSize()
122      
123       def needsRearrangement(self,num_new_data=0):
124           """returns True if the buffer will be full after num_new_data have been appended (to be overwritten by subclass)"""
125           return self.getBaseBuffer().needsRearrangement(num_new_data)
126    
127       def isEmpty(self):
128          """returns true if no data have been appended to the buffer"""
129          return self.getNumData()<=0
130      
131       def getNumData(self):
132          """returns the number of data (not all of them are accessible)"""
133          return self.getIdOfLastDatum()-self.getIdOfFirstDatum()+1
134    
135       def getBaseBuffer(self):
136          """return the buffer referenced by the TimeSeriesBaseDataset"""
137          return self.__buffer
138    
139       def getOffset(self):
140          """return the offset when referring to dataset elements"""
141          return self.__offset
142    
143       def __getitem__(self,index):
144          """returns the datum index"""
145          if type(index)==SliceType:
146             start=index.start
147             end=index.stop
148             if start==end:
149                return self[start]
150             else:
151                 if start<self.getIdOfFirstDatum() or start>self.getIdOfLastDatum() or \
152                     end-1<self.getIdOfFirstDatum() or end-1>self.getIdOfLastDatum(): raise IndexError,"%s: Index [%d:%d] out of range"%(self,start,end)
153                 return self.getBaseBuffer()[start+self.getOffsetInBaseBuffer():end+self.getOffsetInBaseBuffer()]
154          else:
155             if index<self.getIdOfFirstDatum() or index>self.getIdOfLastDatum(): raise IndexError,"%s: Index %d out of range"%(self,index)
156             return self.getBaseBuffer()[index+self.getOffsetInBaseBuffer()]
157    
158  _DEFAULT_CACHE_SIZE=9  class TimeSeriesBaseBuffer(TimeSeriesBaseDataset):
159  _DEFAULT_BUFFER_SIZE=5     """An implementation of TimeSeriesBaseDataset which actually stores data in a numarray buffer"""
160  _FLOATING_TYPE=numarray.Float64     def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,numComponents=1,type=DEFAULT_FLOAT_TYPE,id_of_first_datum=0,debug=False,description="TimeSeriesBaseBuffer"):
   
 class TimeSeries(TimeSeriesBase):  
    def __init__(self,buffer_overlap=0,buffer_size=_DEFAULT_BUFFER_SIZE,cache_size=_DEFAULT_CACHE_SIZE,numComponents=1):  
        if buffer_size>cache_size: raise ValueError,"buffer size has to be less or equal cache size"  
        TimeSeriesBase.__init__(self)  
        self.__updates=list()  
        self.__max_buffer_overlap=0  
        self.__buffer_overlap=0  
        self.__numNodes=0  
        self.__numNodesInBuffer=0  
        self.__numNodesInCache=0  
        self.__firstNodeInBuffer=0  
        self.__firstNodeInCache=0  
        self.__buffer_size=buffer_size  
        self.__node_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)  
        self.__attachment_cache=[]  
161         if numComponents<2:         if numComponents<2:
162            self.__value_cache=numarray.zeros((cache_size,),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,),type)
163         else:         else:
164            self.__value_cache=numarray.zeros((cache_size,numComponents),_FLOATING_TYPE)            buffer=numarray.zeros((buffer_size,numComponents),type)
165         self.resizeMaxBufferOverlap(buffer_overlap)         TimeSeriesBaseDataset.__init__(self,buffer,id_of_first_datum-1,debug,description)
166           self.__num_data_in_buffer=0
167     def __del__(self):         self.__id_last_unreferenced_datum=id_of_first_datum-1
168         self.flush()         self.__id_last_datum=id_of_first_datum-1
169           self.__id_first_datum=id_of_first_datum
170           if self.debug(): print "Debug: %s : buffer of size %d with %d components allocated (first datum is %d)."% \
171                           (self,self.getBaseBufferSize(),self.getNumComponents(),id_of_first_datum)
172    
    def __str__(self):  
        return "TimeSeries"  
173    
174       def getBaseBufferSize(self):
175           """returns the size of the buffer"""
176           return self.getBaseBuffer().shape[0]
177      
178     def getNumComponents(self):     def getNumComponents(self):
179         if self.__value_cache.rank==1:         """returns the number of components of the data (overwrites TimeSeriesBaseDataset method)"""
180           if self.getBaseBuffer().rank==1:
181            return 1            return 1
182         else:         else:
183            return self.__value_cache.shape[1]            return self.getBaseBuffer().shape[1]
   
    def getNumNodes(self):  
        """returns the number of time nodes in the time series"""  
        return self.__numNodes  
   
    def getCacheSize(self):  
        """returns the cache size"""  
        return self.__node_cache.shape[0]  
   
    def getBufferSize(self):  
        """returns the cache size"""  
        return self.__buffer_size  
   
    def getNumNodesInCache(self):  
        """returns the number of nodes in cache"""  
        return self.__numNodesInCache  
   
    def getNumNodesInBuffer(self):  
        """returns the number of nodes in cache"""  
        return self.__numNodesInBuffer  
       
    def getFirstNodeInCache(self):  
        """returns the id number of the first node in the cache"""  
        return self.__firstNodeInCache  
   
    def getFirstNodeInBuffer(self):  
        """returns the id number of the first node in the buffer"""  
        return self.__firstNodeInBuffer  
   
    def getFirstNodeOfBufferInCache(self):  
        """returns the first location of the first node in the buffer relative to the cache"""  
        return self.getFirstNodeInBuffer()-self.getFirstNodeInCache()  
   
    def getBufferOverlap(self):  
        """returns the current size of the left extension"""  
        return self.__buffer_overlap  
   
    def getMaxBufferOverlap(self):  
        """returns the maximum size of the left extension"""  
        return self.__max_buffer_overlap  
   
    def resizeMaxBufferOverlap(self,new_buffer_overlap=0):  
        if new_buffer_overlap>self.__max_buffer_overlap:  
           if self.getNumNodes()>0: raise ValueError,"left extension can only be resized for empty time series"  
           if self.getCacheSize()<self.getBufferSize()+new_buffer_overlap:  
                raise ValueError,"Cache size is too small! required cache size is %s"%self.getBufferSize()+new_buffer_overlap  
           self.__max_buffer_overlap=new_buffer_overlap  
           if self.debug(): print "Debug: %s: left extension is increased to %d"%(self,new_buffer_overlap)  
   
    def getLastNode(self):  
        if self.getNumNodesInCache()>0:  
           return self.__node_cache[self.getNumNodesInCache()-1]  
        else:  
           return -1.e300  
184    
185     def getLastValue(self):     def getNumDataInBaseBuffer(self):
186         if self.getNumNodesInCache()>0:         """returns the number of data currently in the buffer"""
187            return self.__node_cache[self.getNumNodesInCache()-1]         return self.__num_data_in_buffer
188         else:  
189            raise ValueError,"No value available"     def getIdOfLastDatum(self):
190          """returns the identification number of the last datum in the data set (overwrites method from TimeSeriesBaseDataset)"""
191          return self.__id_last_datum
192    
193       def getIdOfFirstDatum(self):
194          """returns the identification number of the first datum (overwrites method from TimeSeriesBaseDataset)"""
195          return self.__id_first_datum
196    
197       def getOffsetInBaseBuffer(self):
198          """returns the offset to access elements in the buffer (overwrites method from TimeSeriesBaseDataset)"""  
199          return -self.getIdOfLastDatum()+self.getNumDataInBaseBuffer()-1  
200    
201       def getIdOfLastUnreferencedDatum(self):
202           """returns the identification number of the last datum which is unused by all TimeSeries referring to the TimeSeriesBaseDataset (overwrites method from TimeSeriesBaseDataset)"""
203           return self.__id_last_unreferenced_datum
204    
205       def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
206           """updates the identification number of the last unused datum (to be overwritten by subclass)"""
207           self.getBaseBuffer().updateIdOfLastUnreferencedDatum(last_unreferenced_datum-self.getOffset())
208    
209       def updateIdOfLastUnreferencedDatum(self,last_unreferenced_datum):
210           """updates the identification number of the last unused datum (overwrites TimeSeriesBaseDataset method)"""
211           if self.__id_last_unreferenced_datum>last_unreferenced_datum:
212               self.__id_last_unreferenced_datum=last_unreferenced_datum
213               if self.debug(): print "Debug: %s: last unused datum is now %s"%(self,last_unreferenced_datum)
214    
215       def needsRearrangement(self,num_new_data=0):
216           """returns True if the buffer will be full after num_new_data have been appended"""
217           return self.getNumDataInBaseBuffer()+num_new_data>self.getBaseBufferSize()
218            
219       def getIdOfFirstAvailableDatum(self):
220          """returns the identification number of the first available datum (overwrites TimeSeriesBaseDataset method)"""
221          return self.getIdOfLastDatum()-self.__num_data_in_buffer+1
222    
223       def append(self,data):
224          """appends data to the buffer. If the buffer would be full the buffer is rearranged before the data are appended (overwrites TimeSeriesBaseDataset method)"""
225          data=numarray.array(data)
226          nc=self.getNumComponents()
227          if data.rank==0:
228            if nc==1:
229               num_new_data=1
230            else:
231               raise ValueError,"%s: illegal data shape"%self
232          elif data.rank==1:
233            if nc==1:
234                 num_new_data=data.shape[0]
235            else:
236                 num_new_data=1  
237          elif data.rank==2:
238            if not nc==data.shape[1]: raise ValueError,"%s: illegal data shape"%self
239            num_new_data=data.shape[0]
240          else:
241             raise ValueError,"%s: illegal rank"%self
242    
243     def checkInUpdate(self,time_series_filter):        # check if the buffer would overflow when the data are appended:
244         """checks in a time_series_filter object to be updated when buffer is full"""        if self.needsRearrangement(num_new_data):
245         if self.getNumNodes()>0:          nn=self.getNumDataInBaseBuffer()
246            raise TypeError,"Check in of TimeSeries requires empty buffer."          num_protected_data=self.getIdOfLastDatum()-self.getIdOfLastUnreferencedDatum()
247         self.__updates.append(time_series_filter)          if num_protected_data+num_new_data>self.getBaseBufferSize():
248         self.resizeMaxBufferOverlap(time_series_filter.getBufferOverlapNeededForUpdate())                raise ValueError,"%s: buffer overflow: buffer size has to be bigger than %d"%(self,num_protected_data+num_new_data)
249         if self.debug(): print "Debug: %s: %s checked in successfully."%(self,time_series_filter)          if num_protected_data>0: self.getBaseBuffer()[0:num_protected_data]=self.getBaseBuffer()[nn-num_protected_data:nn]
250            self.__num_data_in_buffer=num_protected_data
251     def append(self,time_nodes,values,attachments=None):          self.__id_last_unreferenced_datum=self.__id_last_datum
252         """appends the time_nodes and values into the buffer"""          if self.debug():
253         num_additional_nodes=time_nodes.shape[0]               print "Debug: %s: rearrangement: first data in buffer is %d."%(self,self.getIdOfLastDatum()-self.getNumDataInBaseBuffer()+1)
254         if num_additional_nodes<1: return        # copy data over:
255         if self.debug():        nn=self.getNumDataInBaseBuffer()
256              if num_additional_nodes>1:        self.getBaseBuffer()[nn:nn+num_new_data]=data
257                 print "Debug: %s: values %d to %d are added to time series."%(self,self.getNumNodes(),self.getNumNodes()+num_additional_nodes-1)        self.__num_data_in_buffer+=num_new_data
258              else:        self.__id_last_datum+=num_new_data
259                 print "Debug: %s: value %d is added to time series."%(self,self.getNumNodes())        self.__id_last_unreferenced_datum+=num_new_data
260         if not num_additional_nodes==values.shape[0]:        if self.debug(): print "Debug: %s: %d data appended. Last unreferenced datum is now %d."%(self,num_new_data,self.__id_last_unreferenced_datum)
261            raise ValueError,"Number time nodes and number of values don't match."  
262         if self.getLastNode()>=time_nodes[0]:  # ======================================
263            raise ValueError,"first time node to be checked in is less than last previously checked in node"  class TimeSeriesControlerView(TimeSeriesBase):
264          """A TimeSeriesControlerView is attached to a Controler and moves forward in time by increasing the id of the last processed datum.
265         if num_additional_nodes>1:           Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
266              if min(time_nodes[1:num_additional_nodes]-time_nodes[0:num_additional_nodes-1])<=0:        def __init__(self,id_first_datum=0,debug=False,description="TimeSeries"):
267                raise ValueError,"time nodes have to be strictly increasing"          TimeSeriesBase.__init__(self,debug,description)
268            self.__id_last_processed_datum=id_first_datum-1
269            if self.debug(): print "Debug: %s  created with first datum %d"%(str(self),id_first_datum)
270    
271          def getIdOfLastProcessedDatum(self):
272              return self.__id_last_processed_datum
273    
274          def updateIdOfLastProcessedDatum(self,id_last_processed_datum):
275              self.__id_last_processed_datum=id_last_processed_datum
276    
277          # def getControler(self):
278          #      """returns the Controler of the time series (to be overwritten by subclass)"""
279          #      pass
280    
281    class TimeSeries(TimeSeriesBaseDataset,TimeSeriesControlerView):
282          """makes TimeSeriesBaseDataset look like a TimeSeries and introduces operations
283             Any implementation of a TimeSeriesControlerView must provide the getControler method which returns the controler"""
284          def __init__(self,dataset,debug=False,description="TimeSeries"):
285            TimeSeriesControlerView.__init__(self,dataset.getIdOfFirstDatum(),debug,description)
286            TimeSeriesBaseDataset.__init__(self,dataset,0,debug,description)
287                
288         # full cache requires a shift:        def getDataset(self):
289         if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():            """returns the TimeSeriesBaseDataset of the time series"""
290             new_num_nodes_in_cache=self.getNumNodesInBuffer()+self.getBufferOverlap()            return self.getBaseBuffer()
291             if new_num_nodes_in_cache+num_additional_nodes>self.getCacheSize():  
292                raise ValueError,"Cache overflow: Expected size is bigger than %d"%(new_num_nodes_in_cache+num_additional_nodes)        # def getControler(self):
293             start=self.getNumNodesInCache()-new_num_nodes_in_cache        #      """returns the Controler of the time series (to be overwritten by subclass)"""
294             end=start+new_num_nodes_in_cache        #      pass
295             self.__node_cache[0:new_num_nodes_in_cache]=self.__node_cache[start:end]  
296             self.__value_cache[0:new_num_nodes_in_cache]=self.__value_cache[start:end]        def __add__(self,arg):
297             self.__attachment_cache[0:new_num_nodes_in_cache]=self.__attachment_cache[start:end]           if isinstance(arg,TimeSeriesBaseDataset):
298                return TimeSeriesAdd(self,arg)
299             self.__firstNodeInCache+=start           else:
300             self.__numNodesInCache=new_num_nodes_in_cache              return TimeSeriesAddScalar(self,arg)
301             if self.debug(): print "Debug: %s: %d values from %d onwards are moved to the beginning of the cache (first node in cache is now %d)."% \  
302                                                                                      (self,new_num_nodes_in_cache,start,self.__firstNodeInCache)        def __sub__(self,arg):
303                       return self+(-1.)*arg
304         # copy values into cache:  
305         if self.getNumNodesInCache()+num_additional_nodes>self.getCacheSize():        def __mul__(self,arg):
306             raise ValueError,"Cache overflow: Expected size is bigger than %d"%(self.getNumNodesInCache()+num_additional_nodes)           if isinstance(arg,TimeSeriesBaseDataset):
307         if self.debug():              return TimeSeriesMult(self,arg)
308             if num_additional_nodes>1:           else:
309                print "Debug: %s: values %d to %d of cache are updated"%(self,self.getNumNodesInCache(),self.getNumNodesInCache()+num_additional_nodes-1)              return TimeSeriesMultScalar(self,arg)
310             else:  
311                print "Debug: %s: value %d of cache is updated."%(self,self.getNumNodesInCache())        def __div__(self,arg):
312         self.__node_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=time_nodes           if isinstance(arg,TimeSeriesBaseDataset):
313         self.__value_cache[self.getNumNodesInCache():self.getNumNodesInCache()+num_additional_nodes]=values              return TimeSeriesDiv(self,arg)
314         self.__numNodes+=num_additional_nodes           else:
315         self.__numNodesInBuffer+=num_additional_nodes              return TimeSeriesMultScalar(self,1./arg)
316         self.__numNodesInCache+=num_additional_nodes  
317         print self.__node_cache        def __pow__(self,arg):
318         print self.__value_cache           if isinstance(arg,TimeSeriesBaseDataset):
319         # copy values into cache:              return TimeSeriesPower(self,arg)
320         if self.getNumNodesInBuffer()>=self.getBufferSize():           else:
321                if self.debug() and len(self.__updates)>0: print "Debug: %s: buffer is full. Updating process is started"%self              return TimeSeriesPowerScalar(self,arg)
322                self.processBuffer()        
323          def __radd__(self,arg):
324             return self.__add__(arg)
325    
326     def flush(self):        def __rsub__(self,arg):
327        self.processBuffer()           return arg+(-1.)*self
328    
329          def __rmul__(self,arg):
330             return self.__mul__(arg)
331    
332     def processBuffer(self):        def __rdiv__(self,arg):
333          if self.getNumNodesInBuffer()>0:           if isinstance(arg,TimeSeriesBaseDataset):
334             for i in self.__updates:              return TimeSeriesDiv(arg,self)
335               if self.debug(): print "Debug: %s: update for %s started"%(self,i)           else:
336               if i.getBufferOverlapNeededForUpdate()>self.getBufferOverlap():              return TimeSeriesDivScalar(self,arg)
337                  s=self.getFirstNodeOfBufferInCache()  
338                  l=self.getNumNodesInBuffer()        def __rpow__(self,arg):
339             if isinstance(arg,TimeSeriesBaseDataset):
340                return TimeSeriesPower(arg,self)
341             else:
342                return Exp(numarray.log(arg)*self)
343    
344          def __lshift__(self,arg):
345             return TimeSeriesShift(self,-arg)
346    
347          def __rshift__(self,arg):
348             return TimeSeriesShift(self,arg)
349    
350          def __neg__(self):
351             return (-1.0)*self
352    
353          def __pos__(self):
354             return (1.0)*self
355    
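# A minimal sketch (not part of either revision) of how the overloaded operators above
# dispatch to the filter classes defined further down; it mirrors the test section at the
# bottom of this file and assumes the usual Python 2/numarray environment of this module.
#
#    c=Controler(buffer_size=15)          # holds the time nodes
#    s=c>>1                               # TimeSeriesShift: s[n] is the previous node c[n-1]
#    a=c+s                                # TimeSeriesAdd (s is a TimeSeriesBaseDataset)
#    b=c+1.                               # TimeSeriesAddScalar (1. is not a dataset)
#    d=2.*c                               # __rmul__ -> TimeSeriesMultScalar
#    for i in range(30): c.nextTime(i*1.) # appending nodes triggers flushing when needed
#    c.flush()                            # afterwards a[n]==c[n]+c[n-1] for n>=1, b[n]==c[n]+1.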
356    class TimeSeriesOperator(TimeSeriesControlerView):
357          """a TimeSeriesOperator describes an operation acting on a list of TimeSeries time_series_args. It allows its output (if there is any) to be updated
358             through the update method, which is overwritten by a particular implementation of the class. The update method is called to process the data [start:end] using
359             [start-left_wing_size:end+right_wing_size] of its arguments"""
360          def __init__(self,controler,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesOperator"):
361              id_first_datum=controler.getIdOfFirstDatum()
362              for i in time_series_args: id_first_datum=max(id_first_datum,i.getIdOfFirstDatum())
363              TimeSeriesControlerView.__init__(self,id_first_datum+left_wing_size,debug,description)
364              self.__left_wing_size=left_wing_size
365              self.__right_wing_size=right_wing_size
366              self.__time_series_args=time_series_args
367              self.__controler=controler
368              controler.appendOperatorToUpdateList(self)
369              if self.debug(): print "Debug: %s: with left/right wing size %d/%d and %d arguments."%(str(self),left_wing_size,right_wing_size,len(time_series_args))
370    
371          def __del__(self):
372              self.getControler().removeOperatorFromUpdateList(self)
373    
374          def getControler(self):
375              """returns the Controler updating the TimeSeriesOperator"""
376              return self.__controler
377    
378          def getLeftWingSize(self):
379              """returns the left wing size"""  
380              return self.__left_wing_size
381    
382          def getRightWingSize(self):
383              """returns the right wing size"""
384              return self.__right_wing_size
385    
386          def getArguments(self,index=None):
387            """returns the list of arguments or, if index is present, the argument with that index. In the latter case None is returned if no arguments are present"""
388              if index==None:
389                 return self.__time_series_args
390              else:
391                 if len(self.__time_series_args)>0:
392                    return self.__time_series_args[index]
393               else:               else:
394                  s=self.getFirstNodeOfBufferInCache()-i.getBufferOverlapNeededForUpdate()                  return None
                 l=self.getNumNodesInBuffer()+i.getBufferOverlapNeededForUpdate()  
              i.update(self.__node_cache[s:s+l],self.__value_cache[s:s+l])  
            self.__firstNodeInBuffer+=self.__numNodesInBuffer  
            self.__numNodesInBuffer=0  
         self.__buffer_overlap=self.getMaxBufferOverlap()  
         if self.debug(): print "Debug: %s: first node in buffer is now %d"%(self,self.__firstNodeInBuffer)  
395    
396            def getArgumentDataset(self,index):
397              """returns the dataset of the argument with index index"""
398              arg=self.getArguments(index)
399              if arg==None:
400                 return None
401              else:
402                  return self.getArguments(index).getDataset()
403    
404          def flush(self):
405              """calls the update method with the maximum processable range. It also updates the id of the last unused datum for all arguments"""
406              start=self.getIdOfLastProcessedDatum()+1
407              end=self.getControler().getIdOfLastDatum()
408              for i in self.getArguments(): end=min(end,i.getIdOfLastDatum())
409              if start<=end-self.getRightWingSize():
410                 if self.debug(): print "Debug: %s: range [%d:%d] is updated."%(self,start,end-self.getRightWingSize())
411                 self.update(start,end-self.getRightWingSize()+1)      
412                 for i in self.getArguments(): i.updateIdOfLastUnreferencedDatum(end-self.getLeftWingSize())
413                 self.updateIdOfLastProcessedDatum(end)
414    
415          def update(self,start,end):
416            """updates the data [start:end] using [start-left_wing_size:end+right_wing_size] of its arguments (is overwritten by a particular TimeSeriesOperator)"""
417              pass
418    
419    
420    class TimeSeriesFilter(TimeSeries,TimeSeriesOperator):
421          """a TimeSeriesFilter is a TimeSeries that is created through a TimeSeriesOperator"""
422          def __init__(self,controler,dataset,time_series_args=[],left_wing_size=0,right_wing_size=0,debug=False,description="TimeSeriesFilter"):
423             TimeSeriesOperator.__init__(self,controler,time_series_args,left_wing_size,right_wing_size,debug,description)
424             TimeSeries.__init__(self,dataset,debug,description)
425    
426          def update(self,start,end):
427              """appends zeros to the dataset. This method should be overwritten by a particular TimeSeriesFilter"""
428              nc=self.getNumComponents()
429              if nc>1:
430                 self.getDataset().append(numarray.zeros([nc,end-start]))
431              else:
432                 self.getDataset().append(numarray.zeros(end-start))
433    
434    class Controler(TimeSeries):
435       """controls a set of TimeSeries"""
436       def __init__(self,buffer_size=DEFAULT_BUFFER_SIZE,debug=False,description="TimeSeriesControler"):
437            TimeSeries.__init__(self,TimeSeriesBaseBuffer(buffer_size,1,DEFAULT_FLOAT_TYPE,0,debug,"node buffer of "+description),debug,"nodes of "+description)
438            self.setFlushRate()  
439            self.__update_time_series=list()
440          
441       def getControler(self):
442           """returns the Controler of the time series (overwrites method of TimeSeries)"""
443           return self
444    
445       def setFlushRate(self,rate=50):
446           """sets the flush rate, i.e. after rate new time nodes have been checked in, the flush method is called."""
447           self.__flush_rate=rate
448           if self.debug(): print "Debug: %s: flush rate is set to %d"%(self,rate)
449    
450       def needsFlushing(self):
451        """returns true if the dependent TimeSeriesFilters need to be flushed because the time node buffer is full or because of the set flush rate"""
452          return self.needsRearrangement(1) or (self.getNumData()+1)%self.__flush_rate==0
453    
454       def flush(self):
455           """flushes all dependent TimeSeriesFilters by calling their flush method"""
456           if self.debug(): print "Debug: %s: start flushing"%self
457           for time_serie in self.__update_time_series: time_serie.flush()
458    
459       def appendOperatorToUpdateList(self,time_serie):
460           if not time_serie.getControler()==self: raise ValueError,"%s: TimeSeries %s is not defined on this controler."%(self,time_serie)
461           if not self.isEmpty(): raise ValueError,"%s: a time series can only be checked in while the controler is empty."%self
462           self.__update_time_series.append(time_serie)
463           if self.debug(): print "Debug: %s: %s has been added to update list."%(self,time_serie)
464    
465       def removeOperatorFromUpdateList(self,time_serie):
466           self.__update_time_series.remove(time_serie)
467           if self.debug(): print "Debug: %s: %s has been removed from update list."%(self,time_serie)
468    
469       def nextTime(self,value):
470           if self.needsFlushing(): self.flush()
471           self.getDataset().append(value)
472           if self.debug(): print "Debug: %s: new time node %e has been added."%(self,value)
473    
474    class TimeSeriesShift(TimeSeries):
475          """creates a shift of the time series, i.e. if v[n] is the datum at time t[n], the value at t[n] becomes v[n+shift] on the output"""
476          def __init__(self,time_serie,shift=1):
477              if shift<0:
478                  dsc="(%s)<<%d"%(time_serie,-shift)
479              else:
480                  dsc="(%s)>>%d"%(time_serie,shift)
481              self.__controler=time_serie.getControler()
482              TimeSeries.__init__(self,TimeSeriesBaseDataset(time_serie.getDataset(),-shift,time_serie.debug(),"buffer view to "+dsc),time_serie.debug(),dsc)
483    
484          def getControler(self):
485              return self.__controler
486    
487    class TimeSeriesAdd(TimeSeriesFilter):
488          """adds two TimeSeries"""
489          def __init__(self,time_serie_1,time_serie_2):
490              dsc="(%s)+(%s)"%(time_serie_1,time_serie_2)
491              dbg=time_serie_1.debug() or time_serie_2.debug()
492              cntrl=time_serie_1.getControler()
493              if not cntrl==time_serie_2.getControler():
494                      raise ValueError("TimeSeriesAdd: %s and %s have different controler."%(time_serie_1,time_serie_2))
495              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
496              TimeSeriesFilter.__init__(self,cntrl, \
497                                  TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
498                                  [time_serie_1,time_serie_2],0,0,dbg,dsc)
499    
500          def update(self,start,end):
501              self.append(self.getArgumentDataset(0)[start:end]+self.getArgumentDataset(1)[start:end])
502    
503    class TimeSeriesAddScalar(TimeSeriesFilter):
504          """adds a single value to a TimeSeries"""
505          def __init__(self,time_serie,scalar):
506              dsc="(%s)+(%s)"%(time_serie,scalar)
507              dbg=time_serie.debug()
508              cntrl=time_serie.getControler()
509              id_first_datum=time_serie.getIdOfFirstDatum()
510              TimeSeriesFilter.__init__(self,cntrl, \
511                           TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
512                           [time_serie],0,0,dbg,dsc)
513              self.__scalar=scalar
514    
515          def update(self,start,end):
516              self.append(self.getArgumentDataset(0)[start:end]+self.__scalar)
517    
518    class TimeSeriesMult(TimeSeriesFilter):
519          """multiplies two TimeSeries"""
520          def __init__(self,time_serie_1,time_serie_2):
521              dsc="(%s)*(%s)"%(time_serie_1,time_serie_2)
522              dbg=time_serie_1.debug() or time_serie_2.debug()
523              cntrl=time_serie_1.getControler()
524              if not cntrl==time_serie_2.getControler():
525                      raise ValueError("TimeSeriesMult: %s and %s have different controler."%(time_serie_1,time_serie_2))
526              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
527              TimeSeriesFilter.__init__(self,cntrl, \
528                       TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
529                       [time_serie_1,time_serie_2],0,0,dbg,dsc)
530    
531          def update(self,start,end):
532              self.append(self.getArgumentDataset(0)[start:end]*self.getArgumentDataset(1)[start:end])
533    
534    class TimeSeriesMultScalar(TimeSeriesFilter):
535          """multiplies a TimeSeries with a single value"""
536          def __init__(self,time_serie,scalar):
537              dsc="(%s)*%s"%(time_serie,scalar)
538              dbg=time_serie.debug()
539              cntrl=time_serie.getControler()
540              id_first_datum=time_serie.getIdOfFirstDatum()
541              TimeSeriesFilter.__init__(self,cntrl, \
542                           TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
543                           [time_serie],0,0,dbg,dsc)
544              self.__scalar=scalar
545    
546          def update(self,start,end):
547              self.append(self.getArgumentDataset(0)[start:end]*self.__scalar)
548    
549    class TimeSeriesDiv(TimeSeriesFilter):
550          """divides two TimeSeries"""
551          def __init__(self,time_serie_1,time_serie_2):
552              dsc="(%s)/(%s)"%(time_serie_1,time_serie_2)
553              dbg=time_serie_1.debug() or time_serie_2.debug()
554              cntrl=time_serie_1.getControler()
555              if not cntrl==time_serie_2.getControler():
556                      raise ValueError("TimeSeriesDiv: %s and %s have different controler."%(time_serie_1,time_serie_2))
557              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
558              TimeSeriesFilter.__init__(self,cntrl, \
559                         TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
560                         [time_serie_1,time_serie_2],0,0,dbg,dsc)
561    
562          def update(self,start,end):
563              self.append(self.getArgumentDataset(0)[start:end]/self.getArgumentDataset(1)[start:end])
564    
565    class TimeSeriesDivScalar(TimeSeriesFilter):
566          """divides a scalar by a TimeSeries"""
567          def __init__(self,time_serie,scalar):
568              dsc="(%s)/(%s)"%(scalar,time_serie)
569              dbg=time_serie.debug()
570              cntrl=time_serie.getControler()
571              id_first_datum=time_serie.getIdOfFirstDatum()
572              TimeSeriesFilter.__init__(self,cntrl, \
573                           TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
574                           [time_serie],0,0,dbg,dsc)
575              self.__scalar=scalar
576    
577          def update(self,start,end):
578              self.append(self.__scalar/self.getArgumentDataset(0)[start:end])
579    
580    class TimeSeriesPower(TimeSeriesFilter):
581          """raises one TimeSeries to the power of another TimeSeries"""
582          def __init__(self,time_serie_1,time_serie_2):
583              dsc="(%s)**(%s)"%(time_serie_1,time_serie_2)
584              dbg=time_serie_1.debug() or time_serie_2.debug()
585              cntrl=time_serie_1.getControler()
586              if not cntrl==time_serie_2.getControler():
587                      raise ValueError("TimeSeriesPower: %s and %s have different controler."%(time_serie_1,time_serie_2))
588              id_first_datum=max(time_serie_1.getIdOfFirstDatum(),time_serie_2.getIdOfFirstDatum())
589              TimeSeriesFilter.__init__(self,cntrl, \
590                    TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie_1.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
591                    [time_serie_1,time_serie_2],0,0,dbg,dsc)
592    
593          def update(self,start,end):
594              self.append(self.getArgumentDataset(0)[start:end]**self.getArgumentDataset(1)[start:end])
595    
596    class TimeSeriesPowerScalar(TimeSeriesFilter):
597          """raises a TimeSeries to the power of a scalar"""
598          def __init__(self,time_serie,scalar):
599              dsc="(%s)**(%s)"%(time_serie,scalar)
600              dbg=time_serie.debug()
601              cntrl=time_serie.getControler()
602              id_first_datum=time_serie.getIdOfFirstDatum()
603              TimeSeriesFilter.__init__(self,cntrl, \
604                           TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
605                           [time_serie],0,0,dbg,dsc)
606              self.__scalar=scalar
607    
608          def update(self,start,end):
609              self.append(self.getArgumentDataset(0)[start:end]**self.__scalar)
610    
611    class Exp(TimeSeriesFilter):
612          """applies the exponential function to a TimeSeries"""
613          def __init__(self,time_serie):
614              dsc="exp(%s)"%(time_serie)
615              dbg=time_serie.debug()
616              cntrl=time_serie.getControler()
617              id_first_datum=time_serie.getIdOfFirstDatum()
618              TimeSeriesFilter.__init__(self,cntrl, \
619                         TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
620                         [time_serie],0,0,dbg,dsc)
621    
622          def update(self,start,end):
623              self.append(numarray.exp(self.getArgumentDataset(0)[start:end]))
624    
625    class Writer(TimeSeriesOperator):
626          """writes the time series into an output stream ostream which must have the writelines method. The values are separated by the string seperator."""
627          def __init__(self,time_serie,ostream,seperator=",",commend_tag="#"):
628             dsc="write %s to %s"%(time_serie,ostream)
629             dbg=time_serie.debug()
630             cntrl=time_serie.getControler()
631             self.__ostream=ostream
632             self.__seperator=seperator
633             TimeSeriesOperator.__init__(self,cntrl,[time_serie],0,0,dbg,dsc)
634             ostream.writelines("%s time series %s\n"%(commend_tag,str(self)))
635    
636  class TimeSeriesCollector(TimeSeries):        def update(self,start,end):
637        """TimeSeriesCollector collects values at time nodes"""           cntrl=self.getControler()
638        def __init__(self):           arg=self.getArguments(0)
639           TimeSeries.__init__(self)           n=arg.getNumComponents()
640             if n<2:
641        def __str__(self):              for i in range(start,end): self.__ostream.writelines("%s%s%s\n"%(cntrl[i],self.__seperator,arg[i]))
642           return "TimeSeriesCollector"           else:
643                for i in range(start,end):
644        def add(self,time_mark,value):                 l="%s"%cntrl[i]
645             """adds the value at time time_mark to the time series"""                 for j in range(n): l=l+"%s%s"(self.__seperator,arg[i][j])
646             self.append(numarray.array([time_mark]),numarray.array([value]))                 self.__ostream.writelines("%s\n"%l)
647    
648        def read(self,istream,seperator=","):  class DataCatcher(TimeSeries):
649          """reads pairs from iostream istream"""        """collects data into a time series."""
650          for l in istream:        def __init__(self,controler,numComponents=1,description="DataCatcher"):
651             d=l.strip().split(seperator)           self.__controler=controler
652             self.add(float(d[0]),float(d[1]))           dbg=controler.debug()
653             TimeSeries.__init__(self,TimeSeriesBaseBuffer(controler.getBaseBufferSize(),numComponents,DEFAULT_FLOAT_TYPE,controler.getIdOfFirstDatum(),dbg,"buffer for "+description),dbg,description)
654  class TimeSeriesIntegrator(TimeSeries,TimeSeriesFilter):  
655        def __init__(self,time_series):        def getControler(self):
656           TimeSeriesFilter.__init__(self,1)            return self.__controler
657           TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \  
658                                                                           numComponents=time_series.getNumComponents())        def nextValue(self,value):
659           self.setDebug(time_series.debug())            """append a value to the time series"""
660           time_series.checkInUpdate(self)            id_last=self.getIdOfLastDatum()
661           self.__integral=0            id_current=self.getControler().getIdOfLastDatum()
662              if id_last+1==id_current:
663        def __str__(self):               self.getDataset().append(value)
664           return "TimeSeriesIntegrator"            elif id_last+1<id_current:
665                   if self.isEmpty():
666        def update(self,times,values):                     self.getDataset().append(value)
667            l=times.shape[0]                     id_last+=1
668            self.append(times[1:l],(values[0:l-1]+values[1:l])/2.*(times[1:l]-times[0:l-1]))                 t_last=self.getControler()[id_last]
669                   t_current=self.getControler()[id_current]
670                   value_last=self[id_last]
671                   out=(value_last-value)/(t_last-t_current)*(self.getControler()[id_last+1:id_current+1]-t_current)+value
672                   self.getDataset().append(out)
673              else :
674                 raise ValueError,"%s: a new time node must be introduced before a new value can be added."%self
675              self.updateIdOfLastUnreferencedDatum(id_last)
676              
677      
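# A minimal sketch (not part of either revision) of how DataCatcher fills values for time
# nodes that were added without a matching nextValue call, mirroring the test at the bottom
# of this file:
#
#    c=Controler(buffer_size=15)
#    q=DataCatcher(c)
#    for i in range(10):
#        c.nextTime(i*1.)               # a time node for every step ...
#        if i%2==1: q.nextValue(i*10.)  # ... but a value only for every second step
#    # q[5] is the value passed at t=5. (50.); q[4] was filled in by linear interpolation
#    # between the values caught at t=3. and t=5., i.e. 40.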
678    class TimeSeriesCumulativeSum(TimeSeriesFilter):
679          """cumulative sum of the time series values"""
680          def __init__(self,time_serie):
681             dsc="cumsum(%s)"%(time_serie)
682             dbg=time_serie.debug()
683             cntrl=time_serie.getControler()
684             id_first_datum=time_serie.getIdOfFirstDatum()
685             TimeSeriesFilter.__init__(self,cntrl, \
686                         TimeSeriesBaseBuffer(cntrl.getBaseBufferSize(),time_serie.getNumComponents(),DEFAULT_FLOAT_TYPE,id_first_datum,dbg,"buffer for "+dsc), \
687                         [time_serie],0,0,dbg,dsc)
688             self.__last_value=0
689    
690          def update(self,start,end):
691              out=numarray.cumsum(self.getArgumentDataset(0)[start:end])+self.__last_value
692              self.__last_value=out[end-start-1]
693              self.append(out)
694                    
695    
696  class TimeSeriesDifferential(TimeSeries,TimeSeriesFilter):  class Reader(TimeSeriesBase):
697        def __init__(self,time_series):        """reads a list of input streams and creates a time series for each input stream, all on the same Controler, where the first column
698           TimeSeriesFilter.__init__(self,1)           is used to create the time nodes"""
699           TimeSeries.__init__(self,buffer_size=time_series.getBufferSize(),cache_size=time_series.getCacheSize(), \        def __init__(self,list_of_istreams,buffer_size=DEFAULT_BUFFER_SIZE,seperator=",",commend_tag="#",debug=False):
700                                                                           numComponents=time_series.getNumComponents())           TimeSeriesBase.__init__(self,debug=debug,description="reader")
701           self.setDebug(time_series.debug())           if not isinstance(list_of_istreams,list):
702           time_series.checkInUpdate(self)                self.__list_of_istreams=[list_of_istreams]
703             else:
704        def __str__(self):                self.__list_of_istreams=list_of_istreams
705           return "TimeSeriesDifferential"           self.__cntrl=Controler(buffer_size,debug,"reader controler")
   
       def update(self,times,values):  
           l=times.shape[0]  
           self.append((times[0:l-1]+times[1:l])/2,(values[0:l-1]-values[1:l])/(times[0:l-1]-times[1:l]))  
   
 class TimeSeriesViewer(TimeSeriesFilter):  
       def __init__(self,time_series):  
          TimeSeriesFilter.__init__(self,0)  
          time_series.checkInUpdate(self)  
   
       def __str__(self):  
          return "TimeSeriesViewer"  
   
       def update(self,times,values):  
           for i in range(times.shape[0]): print "[%s: %s]"%(times[i],values[i])  
   
 class TimeSeriesWriter(TimeSeriesFilter):  
       def __init__(self,time_series,ostream,seperator=","):  
          TimeSeriesFilter.__init__(self,0)  
          time_series.checkInUpdate(self)  
          self.setDebug(time_series.debug())  
          self.__ostream=ostream  
706           self.__seperator=seperator           self.__seperator=seperator
707             self.__commend_tag=commend_tag
708             self.__time_series={}
709             self.__t={}
710             self.__v={}
711             # set up the time series:
712             for i in self.__list_of_istreams:
713               line=self.__commend_tag
714               while  not line=="" and line[0]==self.__commend_tag:
715                   line=i.readline().strip()
716               if line=="":
717                  list_of_istreams.remove(i)
718               else:
719                  d=line.split(self.__seperator)
720                  self.__t[i]=float(d[0])
721                  tmp=[]
722                  for j in d[1:]: tmp.append(float(j))
723                  self.__v[i]=numarray.array(tmp)
724                  self.__time_series[i]=DataCatcher(self.__cntrl,len(d)-1,str(i))
725    
726             #
727          def run(self):
728             while len(self.__list_of_istreams)>0:
729                if len(self.__time_series)>0:
730                   # find the list of input streams with the minimum time node:
731                   tminargs=[]
732                   for i in self.__time_series:
733                       if len(tminargs)==0:
734                           tminargs.append(i)
735                       elif abs(self.__t[tminargs[0]]-self.__t[i])<1.e-8*abs(self.__t[i]):
736                           tminargs.append(i)
737                       elif self.__t[i]<self.__t[tminargs[0]]:
738                           tminargs=[i]
739                   # append the minimum time node and the corresponding values:
740                   self.__cntrl.nextTime(self.__t[tminargs[0]])
741                   for i in tminargs:
742                       self.__time_series[i].nextValue(self.__v[i])
743                       # find next line without leading "#"
744                       line="#"
745                       while not line=="" and line[0]==self.__commend_tag:
746                           line=i.readline().strip()
747                       # if eof reached iostream is removed for searching
748                       if line=="":
749                          self.__list_of_istreams.remove(i)
750                       else:
751                          d=line.split(self.__seperator)
752                          self.__t[i]=float(d[0])
753                          tmp=[]
754                          for j in d[1:]: tmp.append(float(j))
755                          self.__v[i]=numarray.array(tmp)
756    
757          def getControler(self):
758             """returns the controler shared by all time series created through the input streams"""
759             return self.__cntrl
760    
761          def getTimeSeries(self,istream=None):
762             """returns the time series as a tuple. If istream is present its time series is returned"""
763             if istream==None:
764                out=self.__time_series.values()
765                if len(out)>1:
766                   return tuple(out)
767                elif len(out)>0:
768                   return out[0]
769                else:
770                   return None
771             else:
772                return self.__time_series[istream]
773    
774    
775    class Plotter(TimeSeriesOperator):
776        def __init__(self,time_series,window_size=DEFAULT_BUFFER_SIZE/4,file_name=None,format=None):
777             if isinstance(time_series,list):
778                 dbg=time_series[0].getControler().debug()
779                 text=""
780                 for i in time_series:
781                   if len(text)==0:
782                      text=str(i)
783                   else:
784                      text=text+","+str(i)
785                 TimeSeriesOperator.__init__(self,time_series[0].getControler(),time_series,window_size,0,dbg,"plot(%s)"%text)
786             else:
787                 dbg=time_series.getControler().debug()
788                 text=str(time_series)
789                 TimeSeriesOperator.__init__(self,time_series.getControler(),[time_series],window_size,0,dbg,"plot(%s)"%text)
790             from pyvisi.renderers.gnuplot import LinePlot,Scene,PsImage
791             self.__renderer=Scene()
792             self.__line_plot=LinePlot(self.__renderer)
793             self.__line_plot.setTitle(text)
794             self.__line_plot.setLineStyle("lines")
795             self.__line_plot.setXLabel("time")
796             self.__line_plot.setYLabel("values")
797             self.__file_name=file_name
798             if format==None:
799                 self.__format=PsImage()
800             else:
801                 self.__format=format
802             self.__window_size=window_size
803    
804        def update(self,start,end):
805             s=max(end-self.__window_size,self.getControler().getIdOfFirstAvailableDatum())
806             args=[self.getControler()[s:end]]
807             for arg in self.getArguments(): args.append(arg[s:end])
808             self.__line_plot.setData(*args)
809             self.__line_plot.render()
810             if self.__file_name==None:
811                 raise SystemError,"Online viewing is not available yet!"
812             else:
813                 self.__renderer.save(fname=self.__file_name, format=self.__format)
814                
815    
816    def viewer(time_serie,seperator=","):
817          """creates a viewer for a time series"""
818          import sys
819          return Writer(time_serie,sys.stdout,seperator)
820    
821    def differential(time_serie):
822          """calculates the derivative Dv of the time series v:
823            
824                Dv[n]=(v[n]-v[n-1])/(t[n]-t[n-1])
825    
826        def __str__(self):        """
827           return "TimeSeriesWriter"        out=(((time_serie<<1)-time_serie)/((time_serie.getControler()<<1)-time_serie.getControler())+ \
828               ((time_serie>>1)-time_serie)/((time_serie.getControler()>>1)-time_serie.getControler()))/2.
829          out.setDescription("d(%s)/dt"%str(time_serie))
830          out.setDebug(time_serie.debug())
831          return out
832    
833        def update(self,times,values):  def integral(time_serie):
834          for i in range(times.shape[0]): self.__ostream.writelines("%s,%s\n"%(times[i],values[i]))        """calculates the integral Iv of the time series v using the trapezoidal rule:
835            
836                Iv[n]=int_{t_0}^{t_n} v ~ sum_{0<i<=n} (v[i]+v[i-1])/2*(t[i]-t[i-1])
837    
838          """
839          out=TimeSeriesCumulativeSum(((time_serie>>1)+time_serie)/2.*(time_serie.getControler()-(time_serie.getControler()>>1)))
840          out.setDescription("I (%s) dt"%str(time_serie))
841          out.setDebug(time_serie.debug())
842          return out
843    
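# A small worked example (not part of either revision) of the trapezoidal rule used by
# integral(), writing t for the time nodes of the controler and v for the series values:
#
#    for t=[0.,1.,2.] and v=[0.,1.,4.] the term ((v>>1)+v)/2.*(t-(t>>1)) is
#        [ (0.+1.)/2*(1.-0.), (1.+4.)/2*(2.-1.) ] = [0.5, 2.5]
#    and its cumulative sum (TimeSeriesCumulativeSum) gives Iv=[0.5, 3.0]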
844    def smooth(time_serie,range=5):
845       """smoothes a time series using, at each time, the previous and next range values"""
846         i=integral(time_serie)
847         out=((i>>range)-(i<<range))/((time_serie.getControler()>>range)-(time_serie.getControler()<<range))
848         out.setDescription("smooth(%s,-%d:%d) dt"%(str(time_serie),range,range))
849         out.setDebug(time_serie.debug())
850         return out
851    
852    def leakySmooth(time_serie,l=0.99):
853         """leaky smoother: s(t)=int_{t_0}^{t} v(r) l^{t-r} dr/ int_{t_0}^{t} l^{t-r} dr """
854         w=l**(-time_serie.getControler())
855       out=integral(time_serie*w)/integral(w)
856         out.setDescription("leaky smoother(%s)"%str(time_serie))
857         return out
858    
859  # test  # test
860    
861  if __name__=="__main__":  if __name__=="__main__":
862       # tests the interfaces to data sets:
863       print "Test of Datasets:"
864       print "================="
865       bf=TimeSeriesBaseBuffer(buffer_size=5,numComponents=1,debug=True,description="TestBaseBuffer")
866       bfv_l=TimeSeriesBaseDataset(bf,offset=1,debug=True,description="offset 1")
867       bfv_r=TimeSeriesBaseDataset(bf,offset=-1,debug=True,description="offset -1")
868       bf.append([1.,2.,3.,4.])
869       print "should be all 2. :",bfv_l[0]
870       print bf[1]
871       print bfv_r[2]
872       bf.append([5.,6.,7.])
873       print "should be all 5. :",bfv_l[3],bf[4],bfv_r[5]
874       print "should be all 6. :",bfv_l[4],bf[5],bfv_r[6]
875       print "should be all 7. :",bfv_l[5],bf[6],bfv_r[7]
876       print "should be all [6., 7.] :",bfv_l[4:6],bf[5:7],bfv_r[6:8]
877    
878       print "Test of Controler"
879       print "================="
880       b=Controler(buffer_size=15,debug=True)
881       s3=b>>3
882       s1=b>>1
883       s_3=b<<3
884       print s_3
885       print b
886       print b+s3
887       sum=(s_3+b)+(b+s3)
888      
889       for i in range(30):
890           b.nextTime(i*1.)
891       b.flush()
892       print "should be all 28. :",s_3.getDataset()[25],b.getDataset()[28],s3.getDataset()[31]
893       print "should be all 29. :",s_3.getDataset()[26],b.getDataset()[29],s3.getDataset()[32]
894       print "should be all 96. :",sum.getDataset()[24]
895      
896       print "Test of operators"
897       print "================="
898       b=Controler(buffer_size=15,debug=True)
899       b.setFlushRate(2)
900       q=DataCatcher(b)
901       b1=b<<1
902       a=b+b1
903       a_s=b1+1.
904       s_a=1.+b1
905       d=b-b1
906       d_s=b1-1.
907       s_d=1.-b1
908       m=b*b1
909       m_s=b1*2.
910       s_m=2.*b1
911       dv=b/b1
912       dv_s=b1/2.
913       s_dv=2./b1
914       p=b**b1
915       p_s=b1**2.
916       s_p=2.**b1
917       pb=+b
918       mb=-b
919       sum=TimeSeriesCumulativeSum(b)
920       diff=differential(b)
921       smt=smooth(b,2)
922       int=integral(b*2)
923       fl=file("/tmp/test.csv","w")
924       w=Writer(q,fl)
925       v=viewer(q)
926       plo=Plotter([a,a_s],window_size=4,file_name="s.ps")
927       for i in range(30):
928           b.nextTime(i*1.)
929           if i%2==1: q.nextValue(i*28.)
930       b.flush()
931       print "a[28] should be %e: %e"%(28.+29.,a[28])
932       print "a_s[28] should be %e: %e"%(29.+1.,a_s[28])
933       print "s_a[28] should be %e: %e"%(29.+1.,s_a[28])
934       print "d[28] should be %e: %e"%(28.-29.,d[28])
935       print "d_s[28] should %e: %e"%(29.-1.,d_s[28])
936       print "s_d[28] should %e: %e"%(1.-29.,s_d[28])
937       print "m[28] should be %e: %e"%(28.*29.,m[28])
938       print "m_s[28] should be %e: %e"%(29.*2.,m_s[28])
939       print "s_m[28] should be %e: %e"%(29.*2.,s_m[28])
940       print "dv[28] should be %e: %e"%(28./29.,dv[28])
941       print "dv_s[28] should be %e: %e"%(29./2.,dv_s[28])
942       print "s_dv[28] should be %e: %e"%(2./29.,s_dv[28])
943       print "p[28] should be %e: %e"%(28.**29.,p[28])
944       print "p_s[28] should be %e: %e"%(29.**2,p_s[28])
945       print "s_p[28] should be %e: %e"%(2.**29.,s_p[28])
946       print "pb[28] should be %e: %e"%(28.,pb[28])
947       print "mb[28] should be %e: %e"%(-28.,mb[28])
948       print "sum[28] should be %e: %e"%(28*29./2,sum[28])
949       print "diff[28] should be %e: %e"%(1.,diff[28])
950       print "smt[27] should be %e: %e"%(27.,smt[27])
951       print "int[28] should be %e: %e"%(28.**2,int[28])
952       print "q[27] should be %e: %e"%(27*28.,q[27])
953       print "q[28] should be %e: %e"%(28*28.,q[28])
954       print "q[29] should be %e: %e"%(29*28.,q[29])
955       fl.flush()
956      
957       rin=Reader(file("/tmp/test.csv","r+"),buffer_size=15,debug=True)
958       rin.run()
959       inp=rin.getTimeSeries()
960       print "inp[27] should be %e: %e"%(27*28.,inp[27])
961       print "inp[28] should be %e: %e"%(28*28.,inp[28])
962       print "inp[29] should be %e: %e"%(29*28.,inp[29])
963    
    c=TimeSeriesCollector()  
    c.setDebugOn()  
    ii=TimeSeriesIntegrator(c)  
    d=TimeSeriesDifferential(c)  
    v=TimeSeriesViewer(ii)  
    w=TimeSeriesWriter(d,file("test.csv","w"))  
   
    for i in range(15):  
       c.add(i*1.,i+1.)  
