1 \chapter{The Module \pyvisi}
2 \label{PYVISI CHAP}
3 \declaremodule{extension}{esys.pyvisi}
4 \modulesynopsis{Python Visualization Interface}
5
6 \section{Introduction}
\pyvisi is a Python module used to generate 2D and 3D visualizations
for escript and its PDE solvers: finley and bruce. The module provides
an easy-to-use interface to the \VTK library (\VTKUrl). Pyvisi can be used to
render (generate) surface maps and contours for scalar fields, arrows and
streamlines for vector fields, and ellipsoids for tensor fields.
There are three
approaches to rendering an object: (1) Online - the object is rendered on-screen
with interaction capability (i.e. zoom and rotate), (2) Offline - the object is
rendered off-screen (no pop-up window) and (3) Display - the object is rendered
on-screen but with no interaction capability (on-the-fly
animation). All three approaches can save the rendered object
as an image (e.g. jpg) and subsequently convert a series of images into a
movie (.mpg).
20
21 The following outlines the general steps to use Pyvisi:
22
23 \begin{enumerate}
\item Create a \Scene instance - a window in which objects are rendered.
26 \item Create a data input instance (i.e. \DataCollector or \ImageReader) -
27 reads and loads the source data for visualization.
28 \item Create a data visualization instance (i.e. \Map, \Velocity, \Ellipsoid,
29 \Contour, \Carpet, \StreamLine or \Image) - processes and manipulates
30 the source data.
31 \item Create a \Camera or \Light instance - controls the viewing angle and
32 lighting effects.
33 \item Render the object - using either the Online, Offline or Display approach.
34 \item Generate movie - converts a series of images into a movie.
35 \end{enumerate}
36 \begin{center}
37 \begin{math}
38 scene \rightarrow data \; input \rightarrow data \; visualization \rightarrow
39 camera \, / \, light \rightarrow render \rightarrow movie
40 \end{math}
41 \end{center}
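
As a quick orientation, the following minimal sketch runs through the whole
pipeline for a surface map of a scalar field. The mesh file and scalar field
name are assumptions borrowed from the \Map example later in this chapter;
adjust them to your own data.

\begin{python}
"""
A minimal sketch of the Pyvisi workflow, rendered and saved as a JPEG image.
The mesh file and scalar field name are assumptions borrowed from the
examples later in this chapter.
"""

# Import the necessary modules.
from esys.pyvisi import Scene, DataCollector, Map, Camera
from esys.pyvisi.constant import *
import os

# 1. Scene - the window in which the object is rendered.
s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 800,
        y_size = 600)

# 2. Data input - read and load the source data.
dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveScalar(scalar = "temperature")

# 3. Data visualization - process and manipulate the source data.
m = Map(scene = s, data_collector = dc, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, cell_to_point = False, outline = True)

# 4. Camera - control the viewing angle.
c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()

# 5. Render - save the result as an image.
s.render(image_name = "map.jpg")
\end{python}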
42
43 \section{\pyvisi Classes}
44 The following subsections give a brief overview of the important classes
45 and some of their corresponding methods. Please refer to \ReferenceGuide for
46 full details.
47
48
49 %#############################################################################
50
51
52 \subsection{Scene Classes}
This subsection details the instances used to set up the viewing environment.
54
55 \subsubsection{\Scene class}
56
57 \begin{classdesc}{Scene}{renderer = Renderer.ONLINE, num_viewport = 1,
58 x_size = 1152, y_size = 864}
A scene is a window in which objects are rendered. Only
one scene needs to be created. However, a scene may be divided into four
smaller windows called viewports (if needed). Each viewport can in turn
render a different object.
63 \end{classdesc}
64
65 The following are some of the methods available:
66 \begin{methoddesc}[Scene]{setBackground}{color}
67 Set the background color of the scene.
68 \end{methoddesc}
69
70 \begin{methoddesc}[Scene]{render}{image_name = None}
71 Render the object using either the Online, Offline or Display mode.
72 \end{methoddesc}
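
For example, the background of a scene can be changed before anything is
rendered. The sketch below is a minimal illustration; the \texttt{Color}
constant used with \texttt{setBackground()} is an assumption (color constants
are not covered in this chapter) and any color available in
\texttt{esys.pyvisi.constant} may be substituted.

\begin{python}
from esys.pyvisi import Scene
from esys.pyvisi.constant import *

# Create a Scene.
s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 800,
        y_size = 600)

# Set a white background (Color.WHITE is assumed to be available in
# esys.pyvisi.constant).
s.setBackground(color = Color.WHITE)

# Render the (empty) scene and save it as an image.
s.render(image_name = "background.jpg")
\end{python}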
73
74 \subsubsection{\Camera class}
75
76 \begin{classdesc}{Camera}{scene, viewport = Viewport.SOUTH_WEST}
77 A camera controls the display angle of the rendered object and one is
78 usually created for a \Scene. However, if a \Scene has four viewports, then a
79 separate camera may be created for each viewport.
80 \end{classdesc}
81
82 The following are some of the methods available:
83 \begin{methoddesc}[Camera]{setFocalPoint}{position}
84 Set the focal point of the camera.
85 \end{methoddesc}
86
87 \begin{methoddesc}[Camera]{setPosition}{position}
88 Set the position of the camera.
89 \end{methoddesc}
90
91 \begin{methoddesc}[Camera]{azimuth}{angle}
92 Rotate the camera to the left and right.
93 \end{methoddesc}
94
95 \begin{methoddesc}[Camera]{elevation}{angle}
Rotate the camera up and down (the angle must be between -90 and 90 degrees).
97 \end{methoddesc}
98
99 \begin{methoddesc}[Camera]{backView}{}
100 Rotate the camera to view the back of the rendered object.
101 \end{methoddesc}
102
103 \begin{methoddesc}[Camera]{topView}{}
104 Rotate the camera to view the top of the rendered object.
105 \end{methoddesc}
106
107 \begin{methoddesc}[Camera]{bottomView}{}
108 Rotate the camera to view the bottom of the rendered object.
109 \end{methoddesc}
110
111 \begin{methoddesc}[Camera]{leftView}{}
112 Rotate the camera to view the left side of the rendered object.
113 \end{methoddesc}
114
115 \begin{methoddesc}[Camera]{rightView}{}
116 Rotate the camera to view the right side of the rendered object.
117 \end{methoddesc}
118
119 \begin{methoddesc}[Camera]{isometricView}{}
Rotate the camera to view the rendered object from an isometric angle.
121 \end{methoddesc}
122
123 \begin{methoddesc}[Camera]{dolly}{distance}
Move the camera towards the rendered object (a distance greater than 1).
The camera cannot be moved away from the rendered object.
126 \end{methoddesc}
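
These methods can be combined to frame the rendered object. The sketch below
only manipulates the camera of an otherwise empty scene; in practice the same
calls are issued after the data visualization instances have been created, and
the angles and distance used here are illustrative values.

\begin{python}
from esys.pyvisi import Scene, Camera
from esys.pyvisi.constant import *

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 800,
        y_size = 600)

# Create a Camera for the first (and only) viewport.
c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()          # start from an isometric angle
c.azimuth(angle = 30)      # rotate 30 degrees to the side
c.elevation(angle = -20)   # tilt the camera downwards
c.dolly(distance = 1.2)    # move the camera closer to the object
\end{python}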
127
128 \subsubsection{\Light class}
129
130 \begin{classdesc}{Light}{scene, viewport = Viewport.SOUTH_WEST}
131 A light controls the lighting effect for the rendered object and works in
132 a similar way to \Camera.
133 \end{classdesc}
134
135 The following are some of the methods available:
136 \begin{methoddesc}[Light]{setColor}{color}
137 Set the light color.
138 \end{methoddesc}
139
140 \begin{methoddesc}[Light]{setFocalPoint}{position}
141 Set the focal point of the light.
142 \end{methoddesc}
143
144 \begin{methoddesc}[Light]{setPosition}{position}
145 Set the position of the light.
146 \end{methoddesc}
147
148 \begin{methoddesc}[Light]{setAngle}{elevation = 0, azimuth = 0}
An alternative way of setting the position and focal point of the light,
using elevation and azimuth angles.
151 \end{methoddesc}
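
A short sketch of \Light is given below. The \texttt{Color} constant is an
assumption (any color constant from \texttt{esys.pyvisi.constant} can be
used), and the angles are illustrative values.

\begin{python}
from esys.pyvisi import Scene, Light
from esys.pyvisi.constant import *

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 800,
        y_size = 600)

# Create a Light for the first viewport.
lt = Light(scene = s, viewport = Viewport.SOUTH_WEST)
# Color.RED is assumed to be available in esys.pyvisi.constant.
lt.setColor(color = Color.RED)
# Position the light 45 degrees above and 30 degrees to the side of the
# focal point.
lt.setAngle(elevation = 45, azimuth = 30)
\end{python}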
152
153
154 %##############################################################################
155
156
157 \subsection{Input Classes}
158 \label{INPUT SEC}
159 This subsection details the instances used to read and load the source data
160 for visualization.
161
162 \subsubsection{\DataCollector class}
163 \begin{classdesc}{DataCollector}{source = Source.XML}
A data collector is used to read data either from an XML file (using
\texttt{setFileName()}) or directly from an escript object (using
\texttt{setData()}). Writing XML files is expensive, but this approach has
the advantage that the results can easily be analyzed after the
simulation has completed.
169 \end{classdesc}
170
171 The following are some of the methods available:
172 \begin{methoddesc}[DataCollector]{setFileName}{file_name}
173 Set the XML file name to read.
174 \end{methoddesc}
175
176 \begin{methoddesc}[DataCollector]{setData}{**args}
Create data using \textless name\textgreater=\textless data\textgreater
pairs. It is assumed that the data is given in the
appropriate format.
180 \end{methoddesc}
181
182 \begin{methoddesc}[DataCollector]{setActiveScalar}{scalar}
183 Specify the scalar field to load.
184 \end{methoddesc}
185
186 \begin{methoddesc}[DataCollector]{setActiveVector}{vector}
187 Specify the vector field to load.
188 \end{methoddesc}
189
190 \begin{methoddesc}[DataCollector]{setActiveTensor}{tensor}
191 Specify the tensor field to load.
192 \end{methoddesc}
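
The two reading modes are selected through the \texttt{source} argument, as
sketched below. The file and field names are borrowed from the examples later
in this chapter; the escript variant is shown as comments because it needs a
Data object produced by a running simulation (a complete example,
\textsf{Reading Data Directly from Escript Objects}, is given at the end of
this chapter).

\begin{python}
from esys.pyvisi import DataCollector
from esys.pyvisi.constant import *
import os

# Mode 1: read a scalar field from an XML file.
dc_xml = DataCollector(source = Source.XML)
dc_xml.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc_xml.setActiveScalar(scalar = "temperature")

# Mode 2: read directly from an escript object. 'T' stands for an escript
# Data object produced by a simulation:
#
#   dc_esc = DataCollector(source = Source.ESCRIPT)
#   dc_esc.setData(temp = T)
\end{python}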
193
194 \subsubsection{\ImageReader class}
195
196 \begin{classdesc}{ImageReader}{format}
197 An image reader is used to read data from an image in a variety of formats.
198 \end{classdesc}
199
200 The following are some of the methods available:
201 \begin{methoddesc}[ImageReader]{setImageName}{image_name}
202 Set the image name to be read.
203 \end{methoddesc}
204
205 \subsubsection{\TextTwoD class}
206
207 \begin{classdesc}{Text2D}{scene, text, viewport = Viewport.SOUTH_WEST}
Two-dimensional text is used to annotate the rendered object
(e.g. inserting titles, authors and labels).
210 \end{classdesc}
211
212 The following are some of the methods available:
213 \begin{methoddesc}[Text2D]{setFontSize}{size}
214 Set the 2D text size.
215 \end{methoddesc}
216
217 \begin{methoddesc}[Text2D]{boldOn}{}
218 Bold the 2D text.
219 \end{methoddesc}
220
221 \begin{methoddesc}[Text2D]{setColor}{color}
222 Set the color of the 2D text.
223 \end{methoddesc}
224
Methods from \ActorTwoD are also available.
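
The sketch below adds a title to a scene. It assumes that
\texttt{setPosition()} (inherited from \ActorTwoD) accepts a \LocalPosition
and that a \texttt{Color} constant is available in
\texttt{esys.pyvisi.constant}; the position values are illustrative.

\begin{python}
from esys.pyvisi import Scene, Text2D, LocalPosition
from esys.pyvisi.constant import *

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 800,
        y_size = 600)

# Annotate the scene with a title.
t = Text2D(scene = s, text = "Temperature distribution",
        viewport = Viewport.SOUTH_WEST)
t.setFontSize(size = 22)
t.boldOn()
# Color.BLACK is assumed to be available in esys.pyvisi.constant.
t.setColor(color = Color.BLACK)
# Place the text near the top left corner of the window (pixel coordinates).
t.setPosition(LocalPosition(20, 570))

s.render(image_name = "text.jpg")
\end{python}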
226
227
228 %##############################################################################
229
230
231 \subsection{Data Visualization Classes}
232 \label{DATAVIS SEC}
This subsection details the instances used to process and manipulate the source
data. The typical usage of some of the classes is also shown.

Note that the source can be either point or cell data. If the source is cell
data, a conversion to point data may be required for the object to be rendered
correctly. If a conversion is needed, the 'cell_to_point' flag (see below) must
be set to 'True'; otherwise it should be 'False' (the default). Occasionally,
an inaccurate object may be rendered from cell data even after conversion.
242
243 \subsubsection{\Map class}
244
245 \begin{classdesc}{Map}{scene, data_collector,
246 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
247 outline = True}
Class that shows a scalar field on a domain surface. The domain surface
can be either colored or gray-scale, depending on the lookup table used.
250 \end{classdesc}
251
252 The following are some of the methods available:\\
253 Methods from \ActorThreeD and \DataSetMapper.
254
255 A typical usage of \Map is shown below.
256
257 \begin{python}
258 """
259 Author: John Ngui, john.ngui@uq.edu.au
260 """
261
262 # Import the necessary modules.
263 from esys.pyvisi import Scene, DataCollector, Map, Camera
264 from esys.pyvisi.constant import *
265 import os
266
267 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
268 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
269 X_SIZE = 800
270 Y_SIZE = 800
271
272 SCALAR_FIELD_POINT_DATA = "temperature"
273 SCALAR_FIELD_CELL_DATA = "temperature_cell"
274 FILE_3D = "interior_3D.xml"
275 IMAGE_NAME = "map.jpg"
276 JPG_RENDERER = Renderer.ONLINE_JPG
277
278 # Create a Scene with four viewports.
279 s = Scene(renderer = JPG_RENDERER, num_viewport = 4, x_size = X_SIZE,
280 y_size = Y_SIZE)
281
# Create a DataCollector reading from an XML file.
283 dc1 = DataCollector(source = Source.XML)
284 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
285 dc1.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA)
286
287 # Create a Map for the first viewport.
288 m1 = Map(scene = s, data_collector = dc1, viewport = Viewport.SOUTH_WEST,
289 lut = Lut.COLOR, cell_to_point = False, outline = True)
290 m1.setRepresentationToWireframe()
291
292 # Create a Camera for the first viewport
293 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
294 c1.isometricView()
295
296 # Create a second DataCollector reading from the same XML file but specifying
297 # a different scalar field.
298 dc2 = DataCollector(source = Source.XML)
299 dc2.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
300 dc2.setActiveScalar(scalar = SCALAR_FIELD_CELL_DATA)
301
302 # Create a Map for the third viewport.
303 m2 = Map(scene = s, data_collector = dc2, viewport = Viewport.NORTH_EAST,
304 lut = Lut.COLOR, cell_to_point = True, outline = True)
305
306 # Create a Camera for the third viewport
307 c2 = Camera(scene = s, viewport = Viewport.NORTH_EAST)
308
309 # Render the object.
310 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
311 \end{python}
312
313 \subsubsection{\MapOnPlaneCut class}
314
315 \begin{classdesc}{MapOnPlaneCut}{scene, data_collector,
316 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
317 outline = True}
318 This class works in a similar way to \Map, except that it shows a scalar
319 field cut using a plane. The plane can be translated and rotated along the
320 X, Y and Z axes.
321 \end{classdesc}
322
323 The following are some of the methods available:\\
324 Methods from \ActorThreeD, \Transform and \DataSetMapper.
325
326 \subsubsection{\MapOnPlaneClip class}
327
328 \begin{classdesc}{MapOnPlaneClip}{scene, data_collector,
329 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
330 outline = True}
331 This class works in a similar way to \MapOnPlaneCut, except that it shows a
332 scalar field clipped using a plane.
333 \end{classdesc}
334
335 The following are some of the methods available:\\
336 Methods from \ActorThreeD, \Transform, \Clipper and \DataSetMapper.
337
338 \subsubsection{\MapOnScalarClip class}
339
340 \begin{classdesc}{MapOnScalarClip}{scene, data_collector,
341 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
342 outline = True}
343 This class works in a similar way to \Map, except that it shows a scalar
344 field clipped using a scalar value.
345 \end{classdesc}
346
347 The following are some of the methods available:\\
348 Methods from \ActorThreeD, \Clipper and \DataSetMapper.
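
As a sketch, the scalar clip value (inherited from \Clipper) selects the part
of the domain that is kept; use \texttt{setInsideOutOn()} or
\texttt{setInsideOutOff()} to choose which side. The file and field names are
borrowed from the \Map example, and the clip value 0.5 is an arbitrary
assumption for that data set.

\begin{python}
from esys.pyvisi import Scene, DataCollector, MapOnScalarClip, Camera
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveScalar(scalar = "temperature")

# Keep only one side of the scalar value 0.5 (an assumed value).
mosc = MapOnScalarClip(scene = s, data_collector = dc,
        viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR,
        cell_to_point = False, outline = True)
mosc.setClipValue(value = 0.5)
mosc.setInsideOutOn()

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()

s.render(image_name = "scalarclip.jpg")
\end{python}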
349
350 \subsubsection{\MapOnScalarClipWithRotation class}
351
352 \begin{classdesc}{MapOnScalarClipWithRotation}{scene, data_collector,
353 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False}
354 This class works in a similar way to \Map except that it
355 shows a 2D scalar field clipped using a scalar value and subsequently
356 rotated around the z-axis to create a 3D looking effect. This class should
357 only be used with 2D data sets and NOT 3D.
358 \end{classdesc}
359
360 The following are some of the methods available:\\
361 Methods from \ActorThreeD, \Clipper, \Rotation and \DataSetMapper.
362
363 \subsubsection{\Velocity class}
364
365 \begin{classdesc}{Velocity}{scene, data_collector, arrow = Arrow.TWO_D,
366 color_mode = ColorMode.VECTOR, viewport = Viewport.SOUTH_WEST,
367 lut = Lut.COLOR, cell_to_point = False, outline = True}
Class that shows a vector field using arrows. The arrows can be either
colored or gray-scale, depending on the lookup table used. If the arrows
are colored, there are two possible coloring modes: using vector data or
using scalar data. Similarly, the arrows can be either two-dimensional or
three-dimensional.
373 \end{classdesc}
374
375 The following are some of the methods available:\\
376 Methods from \ActorThreeD, \GlyphThreeD, \MaskPoints and \DataSetMapper.
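
A short sketch of \Velocity is given below; the arrows are scaled via the
inherited \GlyphThreeD methods and thinned out via the inherited \MaskPoints
methods. The vector field name is borrowed from the \VelocityOnPlaneCut
example below, and the scale factor and ratio are illustrative values.

\begin{python}
from esys.pyvisi import Scene, DataCollector, Velocity, Camera
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveVector(vector = "velocity")

# Show the vector field with 2D arrows colored by the vector data.
v = Velocity(scene = s, data_collector = dc, arrow = Arrow.TWO_D,
        color_mode = ColorMode.VECTOR, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, cell_to_point = False, outline = True)
v.setScaleModeByVector()               # scale the arrows by the vector data
v.setScaleFactor(scale_factor = 0.5)   # overall arrow size
v.setRatio(2)                          # display only every 2nd point
v.randomOn()                           # randomize the selected points

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()

s.render(image_name = "velocity_arrows.jpg")
\end{python}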
377
378 \subsubsection{\VelocityOnPlaneCut class}
379
380 \begin{classdesc}{VelocityOnPlaneCut}{scene, data_collector,
381 arrow = Arrow.TWO_D, color_mode = ColorMode.VECTOR,
382 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR,
383 cell_to_point = False, outline = True}
This class works in a similar way to \MapOnPlaneCut, except that
it shows a vector field using arrows, cut by a plane.
386 \end{classdesc}
387
388 The following are some of the methods available:\\
389 Methods from \ActorThreeD, \GlyphThreeD, \Transform, \MaskPoints and
390 \DataSetMapper.
391
392 A typical usage of \VelocityOnPlaneCut is shown below.
393
394 \begin{python}
395 """
396 Author: John Ngui, john.ngui@uq.edu.au
397 """
398
399 # Import the necessary modules
400 from esys.pyvisi import Scene, DataCollector, VelocityOnPlaneCut, Camera
401 from esys.pyvisi.constant import *
402 import os
403
404 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
405 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
406 X_SIZE = 400
407 Y_SIZE = 400
408
409 VECTOR_FIELD_CELL_DATA = "velocity"
410 FILE_3D = "interior_3D.xml"
411 IMAGE_NAME = "velocity.jpg"
412 JPG_RENDERER = Renderer.ONLINE_JPG
413
414 # Create a Scene.
415 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
416 y_size = Y_SIZE)
417
# Create a DataCollector reading from an XML file.
419 dc1 = DataCollector(source = Source.XML)
420 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
421 dc1.setActiveVector(vector = VECTOR_FIELD_CELL_DATA)
422
423 # Create VelocityOnPlaneCut.
424 vopc1 = VelocityOnPlaneCut(scene = s, data_collector = dc1,
425 viewport = Viewport.SOUTH_WEST, color_mode = ColorMode.VECTOR,
426 arrow = Arrow.THREE_D, lut = Lut.COLOR, cell_to_point = False,
427 outline = True)
428 vopc1.setScaleFactor(scale_factor = 0.5)
429 vopc1.setPlaneToXY(offset = 0.5)
430 vopc1.setRatio(2)
431 vopc1.randomOn()
432
433 # Create a Camera.
434 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
435 c1.isometricView()
436 c1.elevation(angle = -20)
437
438 # Render the object.
439 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
440 \end{python}
441
442 \subsubsection{\VelocityOnPlaneClip class}
443
444 \begin{classdesc}{VelocityOnPlaneClip}{scene, data_collector,
445 arrow = Arrow.TWO_D, color_mode = ColorMode.VECTOR,
446 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR,
cell_to_point = False, outline = True}
This class works in a similar way to \MapOnPlaneClip, except that it shows a
vector field using arrows, clipped by a plane.
450 \end{classdesc}
451
452 The following are some of the methods available:\\
453 Methods from \ActorThreeD, \GlyphThreeD, \Transform, \Clipper,
454 \MaskPoints and \DataSetMapper.
455
456 \subsubsection{\Ellipsoid class}
457
458 \begin{classdesc}{Ellipsoid}{scene, data_collector,
viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
outline = True}
Class that shows a tensor field using ellipsoids. The ellipsoids can be either
colored or gray-scale, depending on the lookup table used.
463 \end{classdesc}
464
465 The following are some of the methods available:\\
466 Methods from \ActorThreeD, \Sphere, \TensorGlyph, \MaskPoints and
467 \DataSetMapper.
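
A short sketch of \Ellipsoid is given below. The tensor field name is borrowed
from the \EllipsoidOnPlaneClip example below (it is cell data, hence
\texttt{cell\_to\_point = True}); the scale factor and ratio are illustrative
values.

\begin{python}
from esys.pyvisi import Scene, DataCollector, Ellipsoid, Camera
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveTensor(tensor = "stress_cell")

# Show the tensor field with ellipsoids (cell data converted to point data).
e = Ellipsoid(scene = s, data_collector = dc, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, cell_to_point = True, outline = True)
e.setScaleFactor(scale_factor = 0.2)   # size of the ellipsoids
e.setRatio(4)                          # display only every 4th point
e.randomOn()

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()

s.render(image_name = "ellipsoid_full.jpg")
\end{python}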
468
469 \subsubsection{\EllipsoidOnPlaneCut class}
470
471 \begin{classdesc}{EllipsoidOnPlaneCut}{scene, data_collector,
472 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
473 outline = True}
This class works in a similar way to \MapOnPlaneCut, except that it shows
a tensor field using ellipsoids, cut by a plane.
476 \end{classdesc}
477
478 The following are some of the methods available:\\
479 Methods from \ActorThreeD, \Sphere, \TensorGlyph, \Transform,
480 \MaskPoints and \DataSetMapper.
481
482 \subsubsection{\EllipsoidOnPlaneClip class}
483
484 \begin{classdesc}{EllipsoidOnPlaneClip}{scene, data_collector,
485 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
486 outline = True}
This class works in a similar way to \MapOnPlaneClip, except that it shows a
tensor field using ellipsoids, clipped by a plane.
489 \end{classdesc}
490
491 The following are some of the methods available:\\
492 Methods from \ActorThreeD, \Sphere, \TensorGlyph, \Transform, \Clipper,
493 \MaskPoints and \DataSetMapper.
494
495 A typical usage of \EllipsoidOnPlaneClip is shown below.
496
497 \begin{python}
498 """
499 Author: John Ngui, john.ngui@uq.edu.au
500 """
501
502 # Import the necessary modules
503 from esys.pyvisi import Scene, DataCollector, EllipsoidOnPlaneClip, Camera
504 from esys.pyvisi.constant import *
505 import os
506
507 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
508 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
509 X_SIZE = 400
510 Y_SIZE = 400
511
512 TENSOR_FIELD_CELL_DATA = "stress_cell"
513 FILE_3D = "interior_3D.xml"
514 IMAGE_NAME = "ellipsoid.jpg"
515 JPG_RENDERER = Renderer.ONLINE_JPG
516
517 # Create a Scene.
518 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
519 y_size = Y_SIZE)
520
# Create a DataCollector reading from an XML file.
522 dc1 = DataCollector(source = Source.XML)
523 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
524 dc1.setActiveTensor(tensor = TENSOR_FIELD_CELL_DATA)
525
526 # Create an EllipsoidOnPlaneClip.
527 eopc1 = EllipsoidOnPlaneClip(scene = s, data_collector = dc1,
528 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = True,
529 outline = True)
530 eopc1.setPlaneToXY()
531 eopc1.setScaleFactor(scale_factor = 0.2)
532 eopc1.rotateX(angle = 10)
533
534 # Create a Camera.
535 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
536 c1.bottomView()
537 c1.azimuth(angle = -90)
538 c1.elevation(angle = 10)
539
540 # Render the object.
541 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
542 \end{python}
543
544 \subsubsection{\Contour class}
545
546 \begin{classdesc}{Contour}{scene, data_collector,
547 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
548 outline = True}
Class that shows a scalar field using contour surfaces. The contour surfaces
can be either colored or gray-scale, depending on the lookup table used. This
class can also be used to generate iso surfaces.
552 \end{classdesc}
553
554 The following are some of the methods available:\\
555 Methods from \ActorThreeD, \ContourModule and \DataSetMapper.
556
557 A typical usage of \Contour is shown below.
558
559 \begin{python}
560 """
561 Author: John Ngui, john.ngui@uq.edu.au
562 """
563
564 # Import the necessary modules
565 from esys.pyvisi import Scene, DataCollector, Contour, Camera
566 from esys.pyvisi.constant import *
567 import os
568
569 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
570 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
571 X_SIZE = 400
572 Y_SIZE = 400
573
574 SCALAR_FIELD_POINT_DATA = "temperature"
575 FILE_3D = "interior_3D.xml"
576 IMAGE_NAME = "contour.jpg"
577 JPG_RENDERER = Renderer.ONLINE_JPG
578
579 # Create a Scene.
580 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
581 y_size = Y_SIZE)
582
# Create a DataCollector reading from an XML file.
584 dc1 = DataCollector(source = Source.XML)
585 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
586 dc1.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA)
587
588 # Create a Contour.
589 ctr1 = Contour(scene = s, data_collector = dc1, viewport = Viewport.SOUTH_WEST,
590 lut = Lut.COLOR, cell_to_point = False, outline = True)
591 ctr1.generateContours(contours = 3)
592
593 # Create a Camera.
594 cam1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
595 cam1.elevation(angle = -40)
596
597 # Render the object.
598 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
599 \end{python}
600
601 \subsubsection{\ContourOnPlaneCut class}
602
603 \begin{classdesc}{ContourOnPlaneCut}{scene, data_collector,
604 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
605 outline = True}
This class works in a similar way to \MapOnPlaneCut, except that it shows a
scalar field using contour surfaces, cut by a plane.
608 \end{classdesc}
609
610 The following are some of the methods available:\\
611 Methods from \ActorThreeD, \ContourModule, \Transform and \DataSetMapper.
612
613 \subsubsection{\ContourOnPlaneClip class}
614
615 \begin{classdesc}{ContourOnPlaneClip}{scene, data_collector,
616 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
617 outline = True}
This class works in a similar way to \MapOnPlaneClip, except that it shows a
scalar field using contour surfaces, clipped by a plane.
620 \end{classdesc}
621
622 The following are some of the methods available:\\
623 Methods from \ActorThreeD, \ContourModule, \Transform, \Clipper and
624 \DataSetMapper.
625
626 \subsubsection{\StreamLine class}
627
628 \begin{classdesc}{StreamLine}{scene, data_collector,
629 viewport = Viewport.SOUTH_WEST, color_mode = ColorMode.VECTOR, lut = Lut.COLOR,
630 cell_to_point = False, outline = True}
Class that shows the direction of particles of a vector field using
streamlines. The streamlines can be either colored or gray-scale, depending on
the lookup table used. If the streamlines are colored, there are two possible
coloring modes: using vector data or using scalar data.
635 \end{classdesc}
636
637 The following are some of the methods available:\\
638 Methods from \ActorThreeD, \PointSource, \StreamLineModule, \Tube and
639 \DataSetMapper.
640
641 A typical usage of \StreamLine is shown below.
642
643 \begin{python}
644 """
645 Author: John Ngui, john.ngui@uq.edu.au
646 """
647
648 # Import the necessary modules.
649 from esys.pyvisi import Scene, DataCollector, StreamLine, Camera
650 from esys.pyvisi.constant import *
651 import os
652
653 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
654 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
655 X_SIZE = 400
656 Y_SIZE = 400
657
658 VECTOR_FIELD_CELL_DATA = "temperature"
659 FILE_3D = "interior_3D.xml"
660 IMAGE_NAME = "streamline.jpg"
661 JPG_RENDERER = Renderer.ONLINE_JPG
662
663 # Create a Scene.
664 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
665 y_size = Y_SIZE)
666
# Create a DataCollector reading from an XML file.
668 dc1 = DataCollector(source = Source.XML)
669 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
670
671 # Create a Streamline.
672 sl1 = StreamLine(scene = s, data_collector = dc1,
673 viewport = Viewport.SOUTH_WEST, color_mode = ColorMode.SCALAR,
674 lut = Lut.COLOR, cell_to_point = False, outline = True)
675 sl1.setTubeRadius(radius = 0.02)
676 sl1.setTubeNumberOfSides(3)
677 sl1.setTubeRadiusToVaryByVector()
678 sl1.setPointSourceRadius(0.9)
679
680 # Create a Camera.
681 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
682 c1.isometricView()
683
684 # Render the object.
685 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
686 \end{python}
687
688 \subsubsection{\Carpet class}
689
690 \begin{classdesc}{Carpet}{scene, data_collector,
viewport = Viewport.SOUTH_WEST, warp_mode = WarpMode.SCALAR,
692 lut = Lut.COLOR, cell_to_point = False, outline = True}
693 This class works in a similar way to \MapOnPlaneCut, except that it shows a
694 scalar field cut on a plane and deformed (warped) along the normal. The
plane can be either colored or gray-scale, depending on the lookup table used.
Similarly, the plane can be deformed using either scalar data or vector data.
697 \end{classdesc}
698
699 The following are some of the methods available:\\
700 Methods from \ActorThreeD, \Warp, \Transform and \DataSetMapper.
701
702 A typical usage of \Carpet is shown below.
703
704 \begin{python}
705 """
706 Author: John Ngui, john.ngui@uq.edu.au
707 """
708
709 # Import the necessary modules.
710 from esys.pyvisi import Scene, DataCollector, Carpet, Camera
711 from esys.pyvisi.constant import *
712 import os
713
714 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
715 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
716 X_SIZE = 400
717 Y_SIZE = 400
718
719 SCALAR_FIELD_CELL_DATA = "temperature_cell"
720 FILE_3D = "interior_3D.xml"
721 IMAGE_NAME = "carpet.jpg"
722 JPG_RENDERER = Renderer.ONLINE_JPG
723
724 # Create a Scene.
725 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
726 y_size = Y_SIZE)
727
# Create a DataCollector reading from an XML file.
729 dc1 = DataCollector(source = Source.XML)
730 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
731 dc1.setActiveScalar(scalar = SCALAR_FIELD_CELL_DATA)
732
733 # Create a Carpet.
734 cpt1 = Carpet(scene = s, data_collector = dc1, viewport = Viewport.SOUTH_WEST,
735 warp_mode = WarpMode.SCALAR, lut = Lut.COLOR, cell_to_point = True,
736 outline = True)
737 cpt1.setPlaneToXY(0.2)
738 cpt1.setScaleFactor(1.9)
739
740 # Create a Camera.
741 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
742 c1.isometricView()
743
744 # Render the object.
745 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME))
746 \end{python}
747
748 \subsubsection{\Legend class}
749
750 \begin{classdesc}{Legend}{scene, data_collector,
751 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, legend = LegendType.SCALAR}
Class that shows a scalar field on a domain surface. The domain surface
can be either colored or gray-scale, depending on the lookup table used.
754 \end{classdesc}
755
756 The following are some of the methods available:\\
757 Methods from \ActorThreeD, \ScalarBar and \DataSetMapper.
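
A sketch of \Legend is given below; the legend is laid out via the inherited
\ScalarBar methods. It assumes that \texttt{setPosition()} accepts a
\LocalPosition; the title, position and data names are illustrative values
borrowed from the earlier examples.

\begin{python}
from esys.pyvisi import Scene, DataCollector, Map, Legend, Camera
from esys.pyvisi import LocalPosition
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveScalar(scalar = "temperature")

# Surface map of the scalar field.
m = Map(scene = s, data_collector = dc, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, cell_to_point = False, outline = True)

# Scalar legend for the same data.
lg = Legend(scene = s, data_collector = dc, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, legend = LegendType.SCALAR)
lg.setTitle(title = "Temperature")
lg.setOrientationToHorizontal()
# Place the legend near the lower edge of the window (pixel coordinates).
lg.setPosition(LocalPosition(40, 5))

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)

s.render(image_name = "legend.jpg")
\end{python}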
758
759 \subsubsection{\Rectangle class}
760
761 \begin{classdesc}{Rectangle}{scene, viewport = Viewport.SOUTH_WEST}
Class that generates a rectangular box.
763 \end{classdesc}
764
765 The following are some of the methods available:\\
766 Methods from \ActorThreeD, \CubeSource and \DataSetMapper.
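
A sketch of \Rectangle is given below, using the inherited \CubeSource methods
to position and size the box. It assumes that \texttt{setCenter()} accepts a
\GlobalPosition; the coordinates, lengths and opacity are illustrative values.

\begin{python}
from esys.pyvisi import Scene, Rectangle, Camera, GlobalPosition
from esys.pyvisi.constant import *

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

# Create a semi-transparent box centered at (0.5, 0.5, 0.5).
r = Rectangle(scene = s, viewport = Viewport.SOUTH_WEST)
r.setCenter(GlobalPosition(0.5, 0.5, 0.5))   # assumed to take a GlobalPosition
r.setXLength(length = 3)
r.setYLength(length = 1)
r.setZLength(length = 0.2)
r.setOpacity(opacity = 0.5)                  # inherited from Actor3D

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
c.isometricView()

s.render(image_name = "rectangle.jpg")
\end{python}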
767
768 \subsubsection{\Image class}
769
770 \begin{classdesc}{Image}{scene, image_reader, viewport = Viewport.SOUTH_WEST}
Class that displays an image which can be scaled (up and down) and has
interaction capability. The image can also be translated and rotated along
the X, Y and Z axes. One of the most common uses of this feature is pasting an
image onto a surface map.
775 \end{classdesc}
776
777 The following are some of the methods available:\\
778 Methods from \ActorThreeD, \PlaneSource and \Transform.
779
780 A typical usage of \Image is shown below.
781
782 \begin{python}
783 """
784 Author: John Ngui, john.ngui@uq.edu.au
785 """
786
787 # Import the necessary modules.
788 from esys.pyvisi import Scene, DataCollector, Map, ImageReader, Image, Camera
789 from esys.pyvisi import GlobalPosition
790 from esys.pyvisi.constant import *
791 import os
792
793 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
794 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
795 X_SIZE = 400
796 Y_SIZE = 400
797
798 SCALAR_FIELD_POINT_DATA = "temperature"
799 FILE_3D = "interior_3D.xml"
800 LOAD_IMAGE_NAME = "flinders.jpg"
801 SAVE_IMAGE_NAME = "image.jpg"
802 JPG_RENDERER = Renderer.ONLINE_JPG
803
804 # Create a Scene.
805 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
806 y_size = Y_SIZE)
807
# Create a DataCollector reading from an XML file.
809 dc1 = DataCollector(source = Source.XML)
810 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
811
812 # Create a Map.
813 m1 = Map(scene = s, data_collector = dc1, viewport = Viewport.SOUTH_WEST,
814 lut = Lut.COLOR, cell_to_point = False, outline = True)
815 m1.setOpacity(0.3)
816
817 # Create an ImageReader (in place of DataCollector).
818 ir = ImageReader(ImageFormat.JPG)
819 ir.setImageName(image_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, \
820 LOAD_IMAGE_NAME))
821
822 # Create an Image.
823 i = Image(scene = s, image_reader = ir, viewport = Viewport.SOUTH_WEST)
824 i.setOpacity(opacity = 0.9)
825 i.translate(0,0,-1)
826 i.setPoint1(GlobalPosition(2,0,0))
827 i.setPoint2(GlobalPosition(0,2,0))
828
829 # Create a Camera.
830 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
831
832 # Render the image.
833 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, SAVE_IMAGE_NAME))
834 \end{python}
835
836 \subsubsection{\Logo class}
837
838 \begin{classdesc}{Logo}{scene, image_reader, viewport = Viewport.SOUTH_WEST}
Class that displays a static image, in particular a logo
(e.g. a company symbol), and has NO interaction capability. The position and
size of the logo can be specified.
842 \end{classdesc}
843
844 The following are some of the methods available:\\
845 Methods from \ImageReslice and \ActorTwoD.
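
A sketch of \Logo is given below. The logo image file name is borrowed from
the \Image example above; the sketch assumes that \texttt{setPosition()}
(inherited from \ActorTwoD) accepts a \LocalPosition, and the size and
position values are illustrative.

\begin{python}
from esys.pyvisi import Scene, ImageReader, Logo, LocalPosition
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

# Read the logo image (in place of a DataCollector).
ir = ImageReader(ImageFormat.JPG)
ir.setImageName(image_name = os.path.join("data_meshes", "flinders.jpg"))

# Display the logo as a static 2D image.
lg = Logo(scene = s, image_reader = ir, viewport = Viewport.SOUTH_WEST)
lg.setSize(size = 0.3)                  # shrink the logo (ImageReslice)
lg.setPosition(LocalPosition(10, 10))   # bottom left corner (Actor2D)

s.render(image_name = "logo.jpg")
\end{python}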
846
847 \subsubsection{\Movie class}
848
849 \begin{classdesc}{Movie}{parameter_file = "make_movie"}
Class that creates a parameter file (called 'make_movie' by default, if no
file name is specified) containing the parameters required by the
'ppmtompeg' command to generate a movie from a series of images.
853 \end{classdesc}
854
855 The following are some of the methods available:\\
856 \begin{methoddesc}[Movie]{imageRange}{input_directory, first_image, last_image}
The range of images from which the movie is to be generated.
858 \end{methoddesc}
859
860 \begin{methoddesc}[Movie]{imageList}{input_directory, image_list}
The list of images from which the movie is to be generated.
862 \end{methoddesc}
863
864 \begin{methoddesc}[Movie]{makeMovie}{movie}
865 Generate the movie.
866 \end{methoddesc}
867
868 A typical usage of \Movie is shown below.
869
870 \begin{python}
871 """
872 Author: John Ngui, john.ngui@uq.edu.au
873 """
874
875 # Import the necessary modules.
876 from esys.pyvisi import Scene, DataCollector, Map, Camera, Velocity, Legend
877 from esys.pyvisi import Movie, LocalPosition
878 from esys.pyvisi.constant import *
879 import os
880
881 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
882 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
883 X_SIZE = 800
884 Y_SIZE = 800
885
886 SCALAR_FIELD_POINT_DATA = "temp"
887 FILE_2D = "tempvel-"
888 IMAGE_NAME = "movie"
889 JPG_RENDERER = Renderer.ONLINE_JPG
890
891 # Create a Scene.
892 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
893 y_size = Y_SIZE)
894
# Create a DataCollector reading from an XML file.
896 dc1 = DataCollector(source = Source.XML)
897 dc1.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA)
898
899 # Create a Map.
900 m1 = Map(scene = s, data_collector = dc1,
901 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
902 outline = True)
903
904 # Create a Camera.
905 cam1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
906
907 # Create a movie.
908 mov = Movie()
909 #lst = []
910
911 # Read in one file one after another and render the object.
912 for i in range(938, 949):
913 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, \
914 FILE_2D + "%06d.vtu") % i)
915
916 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, \
917 IMAGE_NAME + "%06d.jpg") % i)
918
919 #lst.append(IMAGE_NAME + "%06d.jpg" % i)
920
921 # Images (first and last inclusive) from which the movie is to be generated.
922 mov.imageRange(input_directory = PYVISI_EXAMPLE_IMAGES_PATH,
923 first_image = IMAGE_NAME + "000938.jpg",
924 last_image = IMAGE_NAME + "000948.jpg")
925
926 # Alternatively, a list of images can be specified.
927 #mov.imageList(input_directory = PYVISI_EXAMPLE_IMAGES_PATH, image_list = lst)
928
929 # Generate the movie.
930 mov.makeMovie(os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, "movie.mpg"))
931 \end{python}
932
933
934 %##############################################################################
935
936
937 \subsection{Coordinate Classes}
938 This subsection details the instances used to position the rendered object.
939
940 \subsubsection{\LocalPosition class}
941
942 \begin{classdesc}{LocalPosition}{x_coor, y_coor}
943 Class that defines the local positioning (X and Y) coordinate system (2D).
944 \end{classdesc}
945
946 \subsubsection{\GlobalPosition class}
947
948 \begin{classdesc}{GlobalPosition}{x_coor, y_coor, z_coor}
949 Class that defines the global positioning (X, Y and Z) coordinate system (3D).
950 \end{classdesc}
951
952
953 %##############################################################################
954
955
956 \subsection{Supporting Classes}
This subsection details the supporting classes, whose methods are inherited
by the input (see Section \ref{INPUT SEC}) and data
visualization classes (see Section \ref{DATAVIS SEC}).
960
961 \subsubsection{\ActorThreeD class}
962 Class that defines a 3D actor. \\
963
964 The following are some of the methods available:
965
966 \begin{methoddesc}[Actor3D]{setOpacity}{opacity}
967 Set the opacity (transparency) of the 3D actor.
968 \end{methoddesc}
969
970 \begin{methoddesc}[Actor3D]{setColor}{color}
971 Set the color of the 3D actor.
972 \end{methoddesc}
973
974 \begin{methoddesc}[Actor3D]{setRepresentationToWireframe}{}
975 Set the representation of the 3D actor to wireframe.
976 \end{methoddesc}
977
978 \subsubsection{\ActorTwoD class}
979 Class that defines a 2D actor. \\
980
981 The following are some of the methods available:
982
983 \begin{methoddesc}[Actor2D]{setPosition}{position}
984 Set the position (XY) of the 2D actor. Default position is the lower left hand
985 corner of the window / viewport.
986 \end{methoddesc}
987
988 \subsubsection{\Clipper class}
989 Class that defines a clipper. \\
990
991 The following are some of the methods available:
992
993 \begin{methoddesc}[Clipper]{setInsideOutOn}{}
994 Clips one side of the rendered object.
995 \end{methoddesc}
996
997 \begin{methoddesc}[Clipper]{setInsideOutOff}{}
998 Clips the other side of the rendered object.
999 \end{methoddesc}
1000
1001 \begin{methoddesc}[Clipper]{setClipValue}{value}
1002 Set the scalar clip value (instead of using a plane) for the clipper.
1003 \end{methoddesc}
1004
1005 \subsubsection{\ContourModule class}
1006 Class that defines the contour module. \\
1007
1008 The following are some of the methods available:
1009
1010 \begin{methoddesc}[ContourModule]{generateContours}{contours = None,
1011 lower_range = None, upper_range = None}
1012 Generate the specified number of contours within the specified range.
1013 In order to generate an iso surface, the 'lower_range' and 'upper_range'
1014 must be equal.
1015 \end{methoddesc}
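
For example, a single iso surface is obtained by requesting one contour with
equal lower and upper ranges. The sketch below reuses the set-up of the
\Contour example, and the value 0.5 is an arbitrary assumption for that data
set.

\begin{python}
from esys.pyvisi import Scene, DataCollector, Contour, Camera
from esys.pyvisi.constant import *
import os

s = Scene(renderer = Renderer.ONLINE_JPG, num_viewport = 1, x_size = 400,
        y_size = 400)

dc = DataCollector(source = Source.XML)
dc.setFileName(file_name = os.path.join("data_meshes", "interior_3D.xml"))
dc.setActiveScalar(scalar = "temperature")

ctr = Contour(scene = s, data_collector = dc, viewport = Viewport.SOUTH_WEST,
        lut = Lut.COLOR, cell_to_point = False, outline = True)
# One iso surface at the scalar value 0.5 (equal lower and upper range).
ctr.generateContours(contours = 1, lower_range = 0.5, upper_range = 0.5)

c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)

s.render(image_name = "isosurface.jpg")
\end{python}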
1016
1017 \subsubsection{\GlyphThreeD class}
1018 Class that defines 3D glyphs. \\
1019
1020 The following are some of the methods available:
1021
1022 \begin{methoddesc}[Glyph3D]{setScaleModeByVector}{}
1023 Set the 3D glyph to scale according to the vector data.
1024 \end{methoddesc}
1025
1026 \begin{methoddesc}[Glyph3D]{setScaleModeByScalar}{}
1027 Set the 3D glyph to scale according to the scalar data.
1028 \end{methoddesc}
1029
1030 \begin{methoddesc}[Glyph3D]{setScaleFactor}{scale_factor}
1031 Set the 3D glyph scale factor.
1032 \end{methoddesc}
1033
1034 \subsubsection{\TensorGlyph class}
1035 Class that defines tensor glyphs. \\
1036
1037 The following are some of the methods available:
1038
1039 \begin{methoddesc}[TensorGlyph]{setScaleFactor}{scale_factor}
1040 Set the scale factor for the tensor glyph.
1041 \end{methoddesc}
1042
1043 \begin{methoddesc}[TensorGlyph]{setMaxScaleFactor}{max_scale_factor}
1044 Set the maximum allowable scale factor for the tensor glyph.
1045 \end{methoddesc}
1046
1047 \subsubsection{\PlaneSource class}
1048 Class that defines a plane source. A plane source is defined by an origin
1049 and two other points, which form the axes (X and Y). \\
1050
1051 The following are some of the methods available:
1052
1053 \begin{methoddesc}[PlaneSource]{setPoint1}{position}
1054 Set the first point from the origin of the plane source.
1055 \end{methoddesc}
1056
1057 \begin{methoddesc}[PlaneSource]{setPoint2}{position}
1058 Set the second point from the origin of the plane source.
1059 \end{methoddesc}
1060
1061 \subsubsection{\PointSource class}
1062 Class that defines the source (location) to generate points. The points are
1063 generated within the radius of a sphere. \\
1064
1065 The following are some of the methods available:
1066
1067 \begin{methoddesc}[PointSource]{setPointSourceRadius}{radius}
1068 Set the radius of the sphere.
1069 \end{methoddesc}
1070
1071 \begin{methoddesc}[PointSource]{setPointSourceCenter}{center}
1072 Set the center of the sphere.
1073 \end{methoddesc}
1074
1075 \begin{methoddesc}[PointSource]{setPointSourceNumberOfPoints}{points}
1076 Set the number of points to generate within the sphere (the larger the
1077 number of points, the more streamlines are generated).
1078 \end{methoddesc}
1079
1080 \subsubsection{\Sphere class}
1081 Class that defines a sphere. \\
1082
1083 The following are some of the methods available:
1084
1085 \begin{methoddesc}[Sphere]{setThetaResolution}{resolution}
1086 Set the theta resolution of the sphere.
1087 \end{methoddesc}
1088
1089 \begin{methoddesc}[Sphere]{setPhiResolution}{resolution}
1090 Set the phi resolution of the sphere.
1091 \end{methoddesc}
1092
1093 \subsubsection{\StreamLineModule class}
1094 Class that defines the streamline module. \\
1095
1096 The following are some of the methods available:
1097
1098 \begin{methoddesc}[StreamLineModule]{setMaximumPropagationTime}{time}
1099 Set the maximum length of the streamline expressed in elapsed time.
1100 \end{methoddesc}
1101
1102 \begin{methoddesc}[StreamLineModule]{setIntegrationToBothDirections}{}
Set the integration to occur in both directions: forward (where the streamline
goes) and backward (where the streamline came from).
1105 \end{methoddesc}
1106
1107 \subsubsection{\Transform class}
1108 Class that defines the orientation of planes. \\
1109
1110 The following are some of the methods available:
1111
1112 \begin{methoddesc}[Transform]{translate}{x_offset, y_offset, z_offset}
1113 Translate the rendered object along the x, y and z-axes.
1114 \end{methoddesc}
1115
1116 \begin{methoddesc}[Transform]{rotateX}{angle}
Rotate the plane about the x-axis.
1118 \end{methoddesc}
1119
1120 \begin{methoddesc}[Transform]{rotateY}{angle}
Rotate the plane about the y-axis.
1122 \end{methoddesc}
1123
1124 \begin{methoddesc}[Transform]{rotateZ}{angle}
Rotate the plane about the z-axis.
1126 \end{methoddesc}
1127
1128 \begin{methoddesc}[Transform]{setPlaneToXY}{offset = 0}
1129 Set the plane orthogonal to the z-axis.
1130 \end{methoddesc}
1131
1132 \begin{methoddesc}[Transform]{setPlaneToYZ}{offset = 0}
1133 Set the plane orthogonal to the x-axis.
1134 \end{methoddesc}
1135
1136 \begin{methoddesc}[Transform]{setPlaneToXZ}{offset = 0}
1137 Set the plane orthogonal to the y-axis.
1138 \end{methoddesc}
1139
1140 \subsubsection{\Tube class}
1141 Class that defines the tube wrapped around the streamlines. \\
1142
1143 The following are some of the methods available:
1144
1145 \begin{methoddesc}[Tube]{setTubeRadius}{radius}
1146 Set the radius of the tube.
1147 \end{methoddesc}
1148
1149 \begin{methoddesc}[Tube]{setTubeRadiusToVaryByVector}{}
1150 Set the radius of the tube to vary by vector data.
1151 \end{methoddesc}
1152
1153 \begin{methoddesc}[Tube]{setTubeRadiusToVaryByScalar}{}
1154 Set the radius of the tube to vary by scalar data.
1155 \end{methoddesc}
1156
1157 \subsubsection{\Warp class}
1158 Class that defines the deformation of a scalar field. \\
1159
1160 The following are some of the methods available:
1161
1162 \begin{methoddesc}[Warp]{setScaleFactor}{scale_factor}
1163 Set the displacement scale factor.
1164 \end{methoddesc}
1165
1166 \subsubsection{\MaskPoints class}
Class that masks points so that only every n-th point is displayed. This is
useful to prevent the rendered object from being cluttered with arrows or
ellipsoids. \\
1170
1171 The following are some of the methods available:
1172
1173 \begin{methoddesc}[MaskPoints]{setRatio}{ratio}
Mask the points, keeping only every n-th point.
1175 \end{methoddesc}
1176
1177 \begin{methoddesc}[MaskPoints]{randomOn}{}
1178 Enables the randomization of the points selected for masking.
1179 \end{methoddesc}
1180
1181 \subsubsection{\ScalarBar class}
1182 Class that defines a scalar bar. \\
1183
1184 The following are some of the methods available:
1185
1186 \begin{methoddesc}[ScalarBar]{setTitle}{title}
1187 Set the title of the scalar bar.
1188 \end{methoddesc}
1189
1190 \begin{methoddesc}[ScalarBar]{setPosition}{position}
1191 Set the local position of the scalar bar.
1192 \end{methoddesc}
1193
1194 \begin{methoddesc}[ScalarBar]{setOrientationToHorizontal}{}
1195 Set the orientation of the scalar bar to horizontal.
1196 \end{methoddesc}
1197
1198 \begin{methoddesc}[ScalarBar]{setOrientationToVertical}{}
1199 Set the orientation of the scalar bar to vertical.
1200 \end{methoddesc}
1201
1202 \begin{methoddesc}[ScalarBar]{setHeight}{height}
1203 Set the height of the scalar bar.
1204 \end{methoddesc}
1205
1206 \begin{methoddesc}[ScalarBar]{setWidth}{width}
1207 Set the width of the scalar bar.
1208 \end{methoddesc}
1209
1210 \begin{methoddesc}[ScalarBar]{setLabelColor}{color}
1211 Set the color of the scalar bar's label.
1212 \end{methoddesc}
1213
1214 \begin{methoddesc}[ScalarBar]{setTitleColor}{color}
1215 Set the color of the scalar bar's title.
1216 \end{methoddesc}
1217
1218 \subsubsection{\ImageReslice class}
Class that defines an image reslice, used to resize static
(no interaction capability) images such as a logo. \\
1221
1222 The following are some of the methods available:
1223
1224 \begin{methoddesc}[ImageReslice]{setSize}{size}
1225 Set the size of the image (logo in particular), between 0 and 2. Size 1 (one)
1226 displays the image in its original size (which is the default).
1227 \end{methoddesc}
1228
1229 \subsubsection{\DataSetMapper class}
1230 Class that defines a data set mapper. \\
1231
1232 The following are some of the methods available:
1233
1234 \begin{methoddesc}[DataSetMapper]{setScalarRange}{lower_range, upper_range}
Set the minimum and maximum scalar range for the data set mapper. If this
method is called, the scalar range read from the source is ignored.
1238 \end{methoddesc}
1239
1240 \subsubsection{\CubeSource class}
Class that defines a cube source. The center of the cube source defines
the point around which the cube is generated, and the X, Y
and Z lengths define the lengths of the cube along the respective axes. If the
X length is 3, the cube extends 1.5 to the left and 1.5 to the right of the
center point.\\
1246
1247 The following are some of the methods available:
1248
1249 \begin{methoddesc}[CubeSource]{setCenter}{center}
1250 Set the cube source center.
1251 \end{methoddesc}
1252
1253 \begin{methoddesc}[CubeSource]{setXLength}{length}
1254 Set the cube source length along the x-axis.
1255 \end{methoddesc}
1256
1257 \begin{methoddesc}[CubeSource]{setYLength}{length}
1258 Set the cube source length along the y-axis.
1259 \end{methoddesc}
1260
1261 \begin{methoddesc}[CubeSource]{setZLength}{length}
1262 Set the cube source length along the z-axis.
1263 \end{methoddesc}
1264
1265 \subsubsection{\Rotation class}
1266 Class that sweeps 2D data around the z-axis to create a 3D looking effect. \\
1267
1268 The following are some of the methods available:
1269
1270 \begin{methoddesc}[Rotation]{setResolution}{resolution}
Set the resolution of the sweep for the rotation, which controls the
number of intermediate points.
1273 \end{methoddesc}
1274
1275 \begin{methoddesc}[Rotation]{setAngle}{angle}
1276 Set the angle of rotation.
1277 \end{methoddesc}
1278
1279
1280 % #############################################################################
1281
1282
1283 \section{More Examples}
1284 This section shows more examples.
1285
1286 \textsf{Reading A Series of Files}
1287
1288 \begin{python}
1289 """
1290 Author: John Ngui, john.ngui@uq.edu.au
1291 """
1292
1293 # Import the necessary modules.
1294 from esys.pyvisi import Scene, DataCollector, Contour, Camera
1295 from esys.pyvisi.constant import *
1296 import os
1297
1298 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
1299 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
1300 X_SIZE = 400
1301 Y_SIZE = 300
1302
1303 SCALAR_FIELD_POINT_DATA_1 = "lava"
1304 SCALAR_FIELD_POINT_DATA_2 = "talus"
1305 FILE_2D = "phi_talus_lava."
1306
1307 IMAGE_NAME = "seriesofreads"
1308 JPG_RENDERER = Renderer.ONLINE_JPG
1309
1310 # Create a Scene.
1311 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
1312 y_size = Y_SIZE)
1313
# Create a DataCollector reading from an XML file.
1315 dc1 = DataCollector(source = Source.XML)
1316 dc1.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA_1)
1317
1318 # Create a Contour.
1319 mosc1 = Contour(scene = s, data_collector = dc1,
1320 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
1321 outline = True)
1322 mosc1.generateContours(0)
1323
1324 # Create a second DataCollector reading from the same XML file
1325 # but specifying a different scalar field.
1326 dc2 = DataCollector(source = Source.XML)
1327 dc2.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA_2)
1328
1329 # Create a second Contour.
1330 mosc2 = Contour(scene = s, data_collector = dc2,
1331 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
1332 outline = True)
1333 mosc2.generateContours(0)
1334
1335 # Create a Camera.
1336 cam1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
1337
1338 # Read in one file one after another and render the object.
1339 for i in range(99, 104):
1340 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, \
1341 FILE_2D + "%04d.vtu") % i)
1342 dc2.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, \
1343 FILE_2D + "%04d.vtu") % i)
1344
1345 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, \
1346 IMAGE_NAME + "%04d.jpg") % i)
1347 \end{python}
1348
\textsf{Manipulating A Single File with A Series of Translations}
1350
1351 \begin{python}
1352 """
1353 Author: John Ngui, john.ngui@uq.edu.au
1354 """
1355
1356 # Import the necessary modules.
1357 from esys.pyvisi import Scene, DataCollector, MapOnPlaneCut, Camera
1358 from esys.pyvisi.constant import *
1359 import os
1360
1361 PYVISI_EXAMPLE_MESHES_PATH = "data_meshes"
1362 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
1363 X_SIZE = 400
1364 Y_SIZE = 400
1365
1366 SCALAR_FIELD_POINT_DATA = "temperature"
1367 FILE_3D = "interior_3D.xml"
1368 IMAGE_NAME = "seriesofcuts"
1369 JPG_RENDERER = Renderer.ONLINE_JPG
1370
1371 # Create a Scene.
1372 s = Scene(renderer = JPG_RENDERER, num_viewport = 1, x_size = X_SIZE,
1373 y_size = Y_SIZE)
1374
# Create a DataCollector reading from an XML file.
1376 dc1 = DataCollector(source = Source.XML)
1377 dc1.setFileName(file_name = os.path.join(PYVISI_EXAMPLE_MESHES_PATH, FILE_3D))
1378 dc1.setActiveScalar(scalar = SCALAR_FIELD_POINT_DATA)
1379
1380 # Create a MapOnPlaneCut.
1381 mopc1 = MapOnPlaneCut(scene = s, data_collector = dc1,
1382 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, cell_to_point = False,
1383 outline = True)
1384 mopc1.setPlaneToYZ(offset = 0.1)
1385
1386 # Create a Camera.
1387 c1 = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
1388 c1.isometricView()
1389
# Render the object with multiple cuts using a series of translations.
1391 for i in range(0, 5):
1392 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, IMAGE_NAME +
1393 "%02d.jpg") % i)
1394 mopc1.translate(0.6,0,0)
1395 \end{python}
1396
1397 \textsf{Reading Data Directly from Escript Objects}
1398
1399 \begin{python}
1400 """
1401 Author: Lutz Gross, l.gross@uq.edu.au
1402 Author: John Ngui, john.ngui@uq.edu.au
1403 """
1404
1405 # Import the necessary modules.
1406 from esys.escript import *
1407 from esys.escript.linearPDEs import LinearPDE
1408 from esys.finley import Rectangle
1409 from esys.pyvisi import Scene, DataCollector, Map, Camera
1410 from esys.pyvisi.constant import *
1411 import os
1412
1413 PYVISI_EXAMPLE_IMAGES_PATH = "data_sample_images"
1414 X_SIZE = 400
1415 Y_SIZE = 400
1416 JPG_RENDERER = Renderer.ONLINE_JPG
1417
1418 #... set some parameters ...
1419 xc=[0.02,0.002]
1420 r=0.001
1421 qc=50.e6
1422 Tref=0.
1423 rhocp=2.6e6
1424 eta=75.
1425 kappa=240.
1426 tend=5.
1427 # ... time, time step size and counter ...
1428 t=0
1429 h=0.1
1430 i=0
1431
1432 #... generate domain ...
1433 mydomain = Rectangle(l0=0.05,l1=0.01,n0=250, n1=50)
1434 #... open PDE ...
1435 mypde=LinearPDE(mydomain)
1436 mypde.setSymmetryOn()
1437 mypde.setValue(A=kappa*kronecker(mydomain),D=rhocp/h,d=eta,y=eta*Tref)
1438 # ... set heat source: ....
1439 x=mydomain.getX()
1440 qH=qc*whereNegative(length(x-xc)-r)
1441 # ... set initial temperature ....
1442 T=Tref
1443
1444 # Create a Scene.
1445 s = Scene(renderer = JPG_RENDERER, x_size = X_SIZE, y_size = Y_SIZE)
1446
1447 # Create a DataCollector reading directly from escript objects.
1448 dc = DataCollector(source = Source.ESCRIPT)
1449
1450 # Create a Map.
1451 m = Map(scene = s, data_collector = dc, \
1452 viewport = Viewport.SOUTH_WEST, lut = Lut.COLOR, \
1453 cell_to_point = False, outline = True)
1454
1455 # Create a Camera.
1456 c = Camera(scene = s, viewport = Viewport.SOUTH_WEST)
1457
1458 # ... start iteration:
1459 while t<0.4:
1460 i+=1
1461 t+=h
1462 mypde.setValue(Y=qH+rhocp/h*T)
1463 T=mypde.getSolution()
1464
1465 dc.setData(temp = T)
1466
1467 # Render the object.
1468 s.render(image_name = os.path.join(PYVISI_EXAMPLE_IMAGES_PATH, \
1469 "diffusion%02d.jpg") % i)
1470 \end{python}
1471
1472 \newpage
1473
1474 \section{Useful Keys}
This section lists some useful keys for interacting with the rendered
object (in the Online approach).
1477
1478 \begin{table}[ht]
1479 \begin{center}
1480 \begin{tabular}{| c | p{13cm} |}
1481 \hline
1482 \textbf{Key} & \textbf{Description} \\ \hline
Keypress 'c' / 'a' & Toggle between the camera ('c') and object ('a') mode. In
camera mode, mouse events affect the camera position and focal point. In
object mode, mouse events affect the rendered object's element (e.g. a
cut surface map, clipped velocity field or streamline) that is under the
mouse pointer.\\ \hline
1488 Mouse button 1 & Rotate the camera around its focal point (if in camera mode)
1489 or rotate the rendered object's element (if in object mode).\\ \hline
1490 Mouse button 2 & Pan the camera (if in camera mode) or translate the rendered
1491 object's element (if in object mode). \\ \hline
1492 Mouse button 3 & Zoom the camera (if in camera mode) or scale the rendered
1493 object's element (if in object mode). \\ \hline
Keypress '3' & Toggle the render window in and out of stereo mode. By default,
1495 red-blue stereo pairs are created. \\ \hline
1496 Keypress 'e' / 'q' & Exit the application if only one file is to be read, or
1497 read and display the next file if multiple files are to be read. \\ \hline
1498 Keypress 's' & Modify the representation of the rendered object to surfaces.
1499 \\ \hline
1500 Keypress 'w' & Modify the representation of the rendered object to wireframe.
1501 \\ \hline
1502 Keypress 'r' & Reset the position of the rendered object to the center.
1503 \\ \hline
1504 \end{tabular}
1505 \caption{Useful keys}
1506 \end{center}
1507 \end{table}
1508
1509
1510 % ############################################################################
1511
1512
1513 \newpage
1514
1515 \section{Sample Output}
This section displays some sample output generated by Pyvisi.
1517
1518 \begin{table}[ht]
1519 \begin{tabular}{c c c}
1520 \includegraphics[width=\thumbnailwidth]{figures/Map} &
1521 \includegraphics[width=\thumbnailwidth]{figures/MapOnPlaneCut} &
1522 \includegraphics[width=\thumbnailwidth]{figures/MapOnPlaneClip} \\
1523 Map & MapOnPlaneCut & MapOnPlaneClip \\
1524 \includegraphics[width=\thumbnailwidth]{figures/MapOnScalarClip} &
1525 \includegraphics[width=\thumbnailwidth]{figures/MapOnScalarClipWithRotation} &
1526 \includegraphics[width=\thumbnailwidth]{figures/Velocity} \\
1527 MapOnScalarClip & MapOnScalarClipWithRotation & Velocity \\ \\ \\ \\
1528 \includegraphics[width=\thumbnailwidth]{figures/VelocityOnPlaneCut} &
1529 \includegraphics[width=\thumbnailwidth]{figures/VelocityOnPlaneClip} &
1530 \includegraphics[width=\thumbnailwidth]{figures/Ellipsoid} \\
1531 VelocityOnPlaneCut & VelocityOnPlaneClip & Ellipsoid \\ \\ \\ \\
1532 \includegraphics[width=\thumbnailwidth]{figures/EllipsoidOnPlaneCut} &
1533 \includegraphics[width=\thumbnailwidth]{figures/EllipsoidOnPlaneClip} \\
1534 EllipsoidOnPlaneCut & EllipsoidOnPlaneClip \\ \\ \\ \\
1535 \end{tabular}
1536 \caption{Sample output}
1537 \end{table}
1538
1539 \begin{table}[t]
1540 \begin{tabular}{c c c}
1541 \includegraphics[width=\thumbnailwidth]{figures/Contour} &
1542 \includegraphics[width=\thumbnailwidth]{figures/ContourOnPlaneCut} &
1543 \includegraphics[width=\thumbnailwidth]{figures/ContourOnPlaneClip} \\
1544 Contour & ContourOnPlaneCut & ContourOnPlaneClip\\ \\
1545 \includegraphics[width=\thumbnailwidth]{figures/StreamLine} &
1546 \includegraphics[width=\thumbnailwidth]{figures/Carpet} &
1547 \includegraphics[width=\thumbnailwidth]{figures/Rectangle} \\
1548 Streamline & Carpet & Rectangle \\ \\ \\
1549 \includegraphics[width=\thumbnailwidth]{figures/Text} &
1550 \includegraphics[width=\thumbnailwidth]{figures/Logo} &
1551 \includegraphics[width=\thumbnailwidth]{figures/Image} \\
1552 Text & Logo & Image \\ \\
1553 \includegraphics[width=\thumbnailwidth]{figures/Legend} \\
1554 Legend \\ \\
1555 \end{tabular}
1556 \caption{Sample Output}
1557 \end{table}
1558
1559
