Mach-O image: libvtkRenderingVolumePython310D-8.1.1.dylib
(the Python 3.10 wrapper module "vtkRenderingVolumePython" of VTK 8.1.1, built under
/Volumes/Data/workspace/med-macos-free/build/ExtProjs/VTK/lib/)

Segments and sections: __TEXT (__text, __stubs, __stub_helper, __cstring, __const,
__gcc_except_tab, __unwind_info), __DATA_CONST (__got, __mod_init_func),
__DATA (__la_symbol_ptr, __data, __bss), __LINKEDIT.

Linked libraries (load commands); all paths are under
/Volumes/Data/workspace/med-macos-free/build/ExtProjs/VTK/lib/ unless given in full:

  libvtkRenderingVolumePython310D-8.1.1.dylib
  libvtkRenderingCorePython310D-8.1.1.dylib
  libvtkIOXMLPython310D-8.1.1.dylib
  libvtkImagingCorePython310D-8.1.1.dylib
  libvtkRenderingVolume-8.1.1.dylib
  libvtkCommonColorPython310D-8.1.1.dylib
  libvtkFiltersGeometryPython310D-8.1.1.dylib
  libvtkFiltersSourcesPython310D-8.1.1.dylib
  libvtkFiltersGeneralPython310D-8.1.1.dylib
  libvtkFiltersCorePython310D-8.1.1.dylib
  libvtkCommonComputationalGeometryPython310D-8.1.1.dylib
  libvtkIOXMLParserPython310D-8.1.1.dylib
  libvtkIOCorePython310D-8.1.1.dylib
  libvtkCommonExecutionModelPython310D-8.1.1.dylib
  libvtkCommonDataModelPython310D-8.1.1.dylib
  libvtkCommonMiscPython310D-8.1.1.dylib
  libvtkCommonSystemPython310D-8.1.1.dylib
  libvtkCommonTransformsPython310D-8.1.1.dylib
  libvtkCommonMathPython310D-8.1.1.dylib
  libvtkCommonCorePython310D-8.1.1.dylib
  libvtkWrappingPython310Core-8.1.1.dylib
  libvtkRenderingCore-8.1.1.dylib
  libvtkCommonColor-8.1.1.dylib
  libvtkFiltersGeometry-8.1.1.dylib
  libvtkFiltersSources-8.1.1.dylib
  libvtkFiltersGeneral-8.1.1.dylib
  libvtkFiltersCore-8.1.1.dylib
  libvtkCommonComputationalGeometry-8.1.1.dylib
  libvtkIOXML-8.1.1.dylib
  libvtkIOXMLParser-8.1.1.dylib
  libvtkIOCore-8.1.1.dylib
  libvtkImagingCore-8.1.1.dylib
  libvtkCommonExecutionModel-8.1.1.dylib
  libvtkCommonDataModel-8.1.1.dylib
  libvtkCommonMisc-8.1.1.dylib
  libvtkCommonSystem-8.1.1.dylib
  libvtkCommonTransforms-8.1.1.dylib
  libvtkCommonMath-8.1.1.dylib
  libvtkCommonCore-8.1.1.dylib
  libvtksys-8.1.1.dylib
  /usr/lib/libc++.1.dylib
  /usr/lib/libSystem.B.dylib

The compiled __text and data sections contain no recoverable text. The readable content
that follows is the module's embedded __cstring data: the Python docstrings for the
wrapped classes vtkDirectionEncoder, vtkEncodedGradientEstimator,
vtkEncodedGradientShader, vtkFiniteDifferenceGradientEstimator and
vtkFixedPointRayCastImage.
E~91fA DfA;LEu HH9uHuH}LDHt+1H}HEH9t HtJ HHH[A\A]A^A_]HHH}HEH9uHH}HEH9t Ht H fUHAWAVAUATSHHHuHӤHEFEHGMĉMȅyHHHL(H}AC6HcH}Le1EIcM,DLDM/E+EăH}HuH}LDDEArI\I9ID]I91HHHHHtfA4DfAtEHHuH<A DfALEALDfALEALDfALEALDfALEHH9uH}|1H}HEH9;@E1؃HpHHH‰уH0s1qHH)1ATALTADUALUADT ALT0ADU ALU0ADT@ALTPADU@ALUPADT`ALTpADU`ALUpH@HuHt(HTHADA ADALH HuH9uLLE~91fA DfA;LEu HH9uHuH}LDHt+1H}HEH9t HtHHH[A\A]A^A_]H_HH}HEH9uHH}HEH9t HtH fUHAWAVSHxH<HHEHxHHED~D}HG]]yHHLw(MA)HxAuiHutfHxHuwtMHxHut9MHuHULXHHHuHM1H SH H;MuHx[A^A_]E1MOf.fUHSPHH=4HtZH=HtGH=Hlt4H=HYt!H=HFtHH[]H[]ÐUHAVSH=2dIH_HHHeHmH)HѫHHHIHHHHHHYdHH9HHHaHHQHiHHH/HI:HAHKHlHsHHH!HL[A^]H=(H5C%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % %%%%%%%%%% %"%$%&%(%*%,%.%0%2%4%6%8%:%<%>%@%B%D%F%H%J%L%N%P%R%T%V%X%Z%\%^%`%b%d%f%h%j%l%n%p%r%t%v%x%z%|%~%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % %%%%%%%%%% %"%$%&%(%*%,%.%0%2%4%6%8%:%<%>%@hhxh3nhIdhcZhzPhFh<h2h(hhhS hgLAS%ݦhhh`hhhh h h hq h h xhZ nh dh ZhH Pht Fh h4h*hZ hh h3hhh5hhh hkhhhD h h h;!hr!vh!lh"bhc"Xh"Nh"Dh:#:h#0h#&h-$hg$h$h%hw%h%hE&h&h&h'ho'h'h'h.(hz(h(h)|hk)rh)hh*^hy*Th*Jh.+@hi+6h+,h,"h[,h,h,ht-h-h*.hi.h.h/hq/h/h/0h0hh&hhxh+nhEdhcZhPhFh int C++: static vtkTypeBool IsTypeOf(const char *type) Get the name of this class IsAV.IsA(string) -> int C++: vtkTypeBool IsA(const char *type) override; Get the name of this class SafeDownCastV.SafeDownCast(vtkObjectBase) -> vtkDirectionEncoder C++: static vtkDirectionEncoder *SafeDownCast(vtkObjectBase *o) Get the name of this class NewInstanceV.NewInstance() -> vtkDirectionEncoder C++: vtkDirectionEncoder *NewInstance() Get the name of this class GetEncodedDirectionV.GetEncodedDirection([float, float, float]) -> int C++: virtual int GetEncodedDirection(float n[3]) Given a normal vector n, return the encoded direction GetDecodedGradientV.GetDecodedGradient(int) -> (float, float, float) C++: virtual float *GetDecodedGradient(int value) / Given an encoded value, return a pointer to the normal vector GetNumberOfEncodedDirectionsV.GetNumberOfEncodedDirections() -> int C++: virtual int GetNumberOfEncodedDirections(void) Return the number of encoded directions GetDecodedGradientTableV.GetDecodedGradientTable() -> (float, ...) C++: virtual float *GetDecodedGradientTable(void) Get the decoded gradient table. There are this->GetNumberOfEncodedDirections() entries in the table, each containing a normal (direction) vector. This is a flat structure - 3 times the number of directions floats in an array. vtkObjectvtkObjectBasep_voidvtkEncodedGradientEstimatorvtkRenderingVolumePython.vtkEncodedGradientEstimatorvtkEncodedGradientEstimator - Superclass for gradient estimation Superclass: vtkObject vtkEncodedGradientEstimator is an abstract superclass for gradient estimation. It takes a scalar input of vtkImageData, computes a gradient value for every point, and encodes this value into a three byte value (2 for direction, 1 for magnitude) using the vtkDirectionEncoder. The direction encoder is defaulted to a vtkRecursiveSphereDirectionEncoder, but can be overridden with the SetDirectionEncoder method. The scale and the bias values for the gradient magnitude are used to convert it into a one byte value according to v = m*scale + bias where m is the magnitude and v is the resulting one byte value. @sa vtkFiniteDifferenceGradientEstimator vtkDirectionEncoder V.IsTypeOf(string) -> int C++: static vtkTypeBool IsTypeOf(const char *type) Return 1 if this class type is the same type of (or a subclass of) the named class. Returns 0 otherwise. 
vtkEncodedGradientEstimator (vtkRenderingVolumePython.vtkEncodedGradientEstimator)
----------------------------------------------------------------------------------
vtkEncodedGradientEstimator - Superclass for gradient estimation.
Superclass: vtkObject

vtkEncodedGradientEstimator is an abstract superclass for gradient estimation. It
takes a scalar input of vtkImageData, computes a gradient value for every point, and
encodes this value into a three byte value (2 for direction, 1 for magnitude) using
the vtkDirectionEncoder. The direction encoder defaults to a
vtkRecursiveSphereDirectionEncoder, but can be overridden with the
SetDirectionEncoder method. The scale and bias values for the gradient magnitude are
used to convert it into a one byte value according to v = m*scale + bias, where m is
the magnitude and v is the resulting one byte value.

@sa vtkFiniteDifferenceGradientEstimator vtkDirectionEncoder

  V.IsTypeOf(string) -> int   [C++: static vtkTypeBool IsTypeOf(const char *type)]
      Return 1 if this class type is the same type of (or a subclass of) the named
      class. Returns 0 otherwise. This method works in combination with vtkTypeMacro
      found in vtkSetGet.h.
  V.IsA(string) -> int   [C++: vtkTypeBool IsA(const char *type) override;]
      Return 1 if this class is the same type of (or a subclass of) the named class.
      Returns 0 otherwise. This method works in combination with vtkTypeMacro found
      in vtkSetGet.h.
  V.SafeDownCast(vtkObjectBase) -> vtkEncodedGradientEstimator
      [C++: static vtkEncodedGradientEstimator *SafeDownCast(vtkObjectBase *o)]
  V.NewInstance() -> vtkEncodedGradientEstimator
      [C++: vtkEncodedGradientEstimator *NewInstance()]
  V.SetInputData(vtkImageData)   [C++: virtual void SetInputData(vtkImageData *)]
  V.GetInputData() -> vtkImageData   [C++: virtual vtkImageData *GetInputData()]
      Set/Get the scalar input for which the normals will be calculated. Note that
      this call does not set up a pipeline connection. vtkEncodedGradientEstimator is
      not an algorithm and does not update its input. If you are directly using this
      class, you may need to manually update the algorithm that produces this data
      object.
  V.SetGradientMagnitudeScale(float)   [C++: virtual void SetGradientMagnitudeScale(float _arg)]
  V.GetGradientMagnitudeScale() -> float   [C++: virtual float GetGradientMagnitudeScale()]
  V.SetGradientMagnitudeBias(float)   [C++: virtual void SetGradientMagnitudeBias(float _arg)]
  V.GetGradientMagnitudeBias() -> float   [C++: virtual float GetGradientMagnitudeBias()]
      Set/Get the scale and bias for the gradient magnitude.
  V.SetBoundsClip(int), V.GetBoundsClipMinValue() -> int, V.GetBoundsClipMaxValue() -> int,
  V.GetBoundsClip() -> int, V.BoundsClipOn(), V.BoundsClipOff()
      Turn on / off the bounding of the normal computation by the this->Bounds
      bounding box.
  V.SetBounds(int, int, int, int, int, int) or V.SetBounds((int, int, int, int, int, int))
      [C++: void SetBounds(int, int, int, int, int, int) / void SetBounds(int a[6])]
  V.GetBounds() -> (int, int, int, int, int, int)   [C++: int *GetBounds()]
      Set / Get the bounds of the computation (used if this->ComputationBounds is 1).
      The bounds are specified xmin, xmax, ymin, ymax, zmin, zmax.
  V.Update()   [C++: void Update(void)]
      Recompute the encoded normals and gradient magnitudes.
  V.GetEncodedNormals() -> (int, ...)   [C++: unsigned short *GetEncodedNormals(void)]
      Get the encoded normals.
  V.GetEncodedNormalIndex(int) -> int   [C++: int GetEncodedNormalIndex(vtkIdType xyz_index)]
  V.GetEncodedNormalIndex(int, int, int) -> int
      [C++: int GetEncodedNormalIndex(int x_index, int y_index, int z_index)]
      Get the encoded normal at an x,y,z location in the volume.
  V.GetGradientMagnitudes() -> (int, ...)   [C++: unsigned char *GetGradientMagnitudes(void)]
      Get the gradient magnitudes.
  V.SetNumberOfThreads(int), V.GetNumberOfThreadsMinValue() -> int,
  V.GetNumberOfThreadsMaxValue() -> int, V.GetNumberOfThreads() -> int
      Get/Set the number of threads to create when encoding normals. This defaults to
      the number of available processors on the machine.
  V.SetDirectionEncoder(vtkDirectionEncoder)
      [C++: void SetDirectionEncoder(vtkDirectionEncoder *direnc)]
  V.GetDirectionEncoder() -> vtkDirectionEncoder
      [C++: virtual vtkDirectionEncoder *GetDirectionEncoder()]
      Set / Get the direction encoder used to encode normal directions to fit within
      two bytes.
  V.SetComputeGradientMagnitudes(int), V.GetComputeGradientMagnitudes() -> int,
  V.ComputeGradientMagnitudesOn(), V.ComputeGradientMagnitudesOff()
      If you don't want to compute gradient magnitudes (but you do want normals for
      shading) this can be used. Be careful - if you have a non-constant gradient
      magnitude transfer function and you turn this on, it may crash.
  V.SetCylinderClip(int), V.GetCylinderClip() -> int, V.CylinderClipOn(), V.CylinderClipOff()
      If the data in each slice is only contained within a circle circumscribed
      within the slice, and the slice is square, then don't compute anything outside
      the circle. This circle through the slices forms a cylinder.
  V.GetLastUpdateTimeInSeconds() -> float   [C++: virtual float GetLastUpdateTimeInSeconds()]
  V.GetLastUpdateTimeInCPUSeconds() -> float   [C++: virtual float GetLastUpdateTimeInCPUSeconds()]
      Get the time required for the last update in seconds or cpu seconds.
  V.GetUseCylinderClip() -> int   [C++: virtual int GetUseCylinderClip()]
  V.GetCircleLimits() -> (int, ...)   [C++: int *GetCircleLimits()]
  V.SetZeroNormalThreshold(float)   [C++: void SetZeroNormalThreshold(float v)]
  V.GetZeroNormalThreshold() -> float   [C++: virtual float GetZeroNormalThreshold()]
      Set / Get the ZeroNormalThreshold - this defines the minimum magnitude of a
      gradient that is considered sufficient to define a direction. Gradients with
      magnitudes at or less than this value are given a "zero normal" index. These
      are handled specially in the shader, and you can set the intensity of light for
      these zero normals in the gradient shader.
  V.SetZeroPad(int), V.GetZeroPadMinValue() -> int, V.GetZeroPadMaxValue() -> int,
  V.GetZeroPad() -> int, V.ZeroPadOn(), V.ZeroPadOff()
      Assume that the data value outside the volume is zero when computing normals.
  V.GetInputSize() -> (int, int, int)   [C++: int *GetInputSize()]
  V.GetInputAspect() -> (float, float, float)   [C++: float *GetInputAspect()]
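Since vtkEncodedGradientEstimator is abstract, the sketch below drives it through the
concrete vtkFiniteDifferenceGradientEstimator documented further on. It assumes
"volume_data" is an existing vtkImageData with point scalars (for example the output
of a reader); the parameter values are illustrative only:

    import vtk

    # volume_data: an existing vtkImageData with point scalars (assumed available).
    estimator = vtk.vtkFiniteDifferenceGradientEstimator()
    estimator.SetInputData(volume_data)       # note: no pipeline connection is made
    estimator.SetGradientMagnitudeScale(1.0)  # magnitude byte: v = m*scale + bias
    estimator.SetGradientMagnitudeBias(0.0)
    estimator.SetNumberOfThreads(4)
    estimator.ZeroPadOn()                     # treat values outside the volume as zero

    estimator.Update()                        # recompute encoded normals and magnitudes

    nx, ny, nz = estimator.GetInputSize()
    normals = estimator.GetEncodedNormals()       # one encoded direction index per voxel
    magnitudes = estimator.GetGradientMagnitudes()
    print(nx * ny * nz)
    print("last update took", estimator.GetLastUpdateTimeInSeconds(), "s")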
vtkEncodedGradientShader (vtkRenderingVolumePython.vtkEncodedGradientShader)
----------------------------------------------------------------------------
Module constant: VTK_MAX_SHADING_TABLES

vtkEncodedGradientShader - Compute shading tables for encoded normals.
Superclass: vtkObject

vtkEncodedGradientShader computes shading tables for encoded normals that indicate
the amount of diffuse and specular illumination received from all light sources at a
surface location with that normal. For diffuse illumination this is accurate, but for
specular illumination it is approximate for perspective projections since the center
view direction is always used as the view direction. Since the shading table is
dependent on the volume (for the transformation that must be applied to the normals
to put them into world coordinates) there is a shading table per volume. This is
necessary because multiple volumes can share a volume mapper.

  V.SafeDownCast(vtkObjectBase) -> vtkEncodedGradientShader
      [C++: static vtkEncodedGradientShader *SafeDownCast(vtkObjectBase *o)]
  V.NewInstance() -> vtkEncodedGradientShader   [C++: vtkEncodedGradientShader *NewInstance()]
  V.SetZeroNormalDiffuseIntensity(float), V.GetZeroNormalDiffuseIntensityMinValue() -> float,
  V.GetZeroNormalDiffuseIntensityMaxValue() -> float, V.GetZeroNormalDiffuseIntensity() -> float,
  V.SetZeroNormalSpecularIntensity(float), V.GetZeroNormalSpecularIntensityMinValue() -> float,
  V.GetZeroNormalSpecularIntensityMaxValue() -> float, V.GetZeroNormalSpecularIntensity() -> float
      Set / Get the intensity of diffuse / specular light used for the zero normals.
  V.UpdateShadingTable(vtkRenderer, vtkVolume, vtkEncodedGradientEstimator)
      [C++: void UpdateShadingTable(vtkRenderer *ren, vtkVolume *vol,
       vtkEncodedGradientEstimator *gradest)]
      Cause the shading table to be updated.
  V.GetRedDiffuseShadingTable(vtkVolume) -> (float, ...),
  V.GetGreenDiffuseShadingTable(vtkVolume) -> (float, ...),
  V.GetBlueDiffuseShadingTable(vtkVolume) -> (float, ...),
  V.GetRedSpecularShadingTable(vtkVolume) -> (float, ...),
  V.GetGreenSpecularShadingTable(vtkVolume) -> (float, ...),
  V.GetBlueSpecularShadingTable(vtkVolume) -> (float, ...)
      [C++: float *GetRedDiffuseShadingTable(vtkVolume *vol), etc.]
      Get the red/green/blue shading table.
  V.SetActiveComponent(int)   [C++: virtual void SetActiveComponent(int _arg)]
  V.GetActiveComponentMinValue() -> int, V.GetActiveComponentMaxValue() -> int,
  V.GetActiveComponent() -> int
      Set the active component for shading. This component's ambient / diffuse /
      specular / specular power values will be used to create the shading table. The
      default is 1.0.
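A sketch of how the shading tables might be refreshed and read back. It assumes an
existing volume-rendering setup: "renderer" (vtkRenderer), "volume" (vtkVolume) and
the "estimator" from the previous sketch are placeholders for objects created
elsewhere, and the intensity values are illustrative:

    import vtk

    shader = vtk.vtkEncodedGradientShader()
    shader.SetActiveComponent(0)               # shade with component 0's material values
    shader.SetZeroNormalDiffuseIntensity(0.2)  # light applied to "zero normal" voxels
    shader.SetZeroNormalSpecularIntensity(0.0)

    # renderer, volume, estimator: assumed to come from an existing rendering setup.
    shader.UpdateShadingTable(renderer, volume, estimator)

    red_diffuse = shader.GetRedDiffuseShadingTable(volume)
    red_specular = shader.GetRedSpecularShadingTable(volume)
    print(len(red_diffuse), len(red_specular))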
vtkFiniteDifferenceGradientEstimator (vtkRenderingVolumePython.vtkFiniteDifferenceGradientEstimator)
----------------------------------------------------------------------------------------------------
vtkFiniteDifferenceGradientEstimator - Use finite differences to estimate gradient.
Superclass: vtkEncodedGradientEstimator

vtkFiniteDifferenceGradientEstimator is a concrete subclass of
vtkEncodedGradientEstimator that uses a central differences technique to estimate the
gradient. The gradient at some sample location (x,y,z) would be estimated by:

    nx = (f(x-dx,y,z) - f(x+dx,y,z)) / 2*dx;
    ny = (f(x,y-dy,z) - f(x,y+dy,z)) / 2*dy;
    nz = (f(x,y,z-dz) - f(x,y,z+dz)) / 2*dz;

This value is normalized to determine a unit direction vector and a magnitude. The
normal is computed in voxel space, and dx = dy = dz = SampleSpacingInVoxels. A scaling
factor is applied to convert this normal from voxel space to world coordinates.

@sa vtkEncodedGradientEstimator

  V.SafeDownCast(vtkObjectBase) -> vtkFiniteDifferenceGradientEstimator
      [C++: static vtkFiniteDifferenceGradientEstimator *SafeDownCast(vtkObjectBase *o)]
  V.NewInstance() -> vtkFiniteDifferenceGradientEstimator
      [C++: vtkFiniteDifferenceGradientEstimator *NewInstance()]
  V.SetSampleSpacingInVoxels(int)   [C++: virtual void SetSampleSpacingInVoxels(int _arg)]
  V.GetSampleSpacingInVoxels() -> int   [C++: virtual int GetSampleSpacingInVoxels()]
      Set/Get the spacing between samples for the finite differences method used to
      compute the normal. This spacing is in voxel units.
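The central-difference formulas above can be read as the following plain-Python
sketch. Here "f" is a hypothetical scalar field sampled at voxel coordinates and
"spacing" plays the role of SampleSpacingInVoxels; the class itself performs this in
C++ over the whole vtkImageData:

    def estimate_gradient(f, x, y, z, spacing=1):
        """Central differences as described above; f(x, y, z) returns a scalar sample."""
        d = spacing
        nx = (f(x - d, y, z) - f(x + d, y, z)) / (2 * d)
        ny = (f(x, y - d, z) - f(x, y + d, z)) / (2 * d)
        nz = (f(x, y, z - d) - f(x, y, z + d)) / (2 * d)

        # Normalize to a unit direction plus a magnitude, as the estimator does
        # before handing the direction to the vtkDirectionEncoder.
        magnitude = (nx * nx + ny * ny + nz * nz) ** 0.5
        if magnitude == 0.0:
            return (0.0, 0.0, 0.0), 0.0   # the "zero normal" case
        return (nx / magnitude, ny / magnitude, nz / magnitude), magnitude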
This class also stores the ZBuffer (if necessary due to intermixed geometry). Perhaps this class could be generalized in the future to be used for ray cast methods other than the fixed point method. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointRayCastImage C++: static vtkFixedPointRayCastImage *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointRayCastImage C++: vtkFixedPointRayCastImage *NewInstance() GetImageV.GetImage() -> (int, ...) C++: unsigned short *GetImage() Get the internal storage for the image. It is a pointer to unsigned short with four components (RGBA) per pixel. This memory is allocated when the AllocateImage method is called. SetImageViewportSizeV.SetImageViewportSize(int, int) C++: void SetImageViewportSize(int, int) V.SetImageViewportSize((int, int)) C++: void SetImageViewportSize(int a[2]) GetImageViewportSizeV.GetImageViewportSize() -> (int, int) C++: int *GetImageViewportSize() Set / Get the ImageViewportSize. This is the size of the whole viewport in pixels. SetImageMemorySizeV.SetImageMemorySize(int, int) C++: void SetImageMemorySize(int, int) V.SetImageMemorySize((int, int)) C++: void SetImageMemorySize(int a[2]) GetImageMemorySizeV.GetImageMemorySize() -> (int, int) C++: int *GetImageMemorySize() Set / Get the ImageMemorySize. This is the size in pixels of the Image ivar. This will be a power of two in order to ensure that the texture can be rendered by graphics hardware that requires power of two textures. SetImageInUseSizeV.SetImageInUseSize(int, int) C++: void SetImageInUseSize(int, int) V.SetImageInUseSize((int, int)) C++: void SetImageInUseSize(int a[2]) GetImageInUseSizeV.GetImageInUseSize() -> (int, int) C++: int *GetImageInUseSize() Set / Get the size of the image we are actually using. As long as the memory size is big enough, but not too big, we won't bother deleting and re-allocating; we'll just continue to use the memory size we have. This size will always be equal to or less than the ImageMemorySize. SetImageOriginV.SetImageOrigin(int, int) C++: void SetImageOrigin(int, int) V.SetImageOrigin((int, int)) C++: void SetImageOrigin(int a[2]) GetImageOriginV.GetImageOrigin() -> (int, int) C++: int *GetImageOrigin() Set / Get the origin of the image. This is the starting pixel within the whole viewport that our Image starts on. That is, we could be generating just a subregion of the whole viewport due to the fact that our volume occupies only a portion of the viewport. The Image pixels will start from this location. SetImageSampleDistanceV.SetImageSampleDistance(float) C++: virtual void SetImageSampleDistance(float _arg) Set / Get the ImageSampleDistance that will be used for rendering. This is a copy of the value stored in the mapper. It is stored here for sharing between all mappers that are participating in the creation of this image. GetImageSampleDistanceV.GetImageSampleDistance() -> float C++: virtual float GetImageSampleDistance() Set / Get the ImageSampleDistance that will be used for rendering. This is a copy of the value stored in the mapper. It is stored here for sharing between all mappers that are participating in the creation of this image. AllocateImageV.AllocateImage() C++: void AllocateImage() Call this method once the ImageMemorySize has been set to allocate the image. If an image already exists, it will be deleted first. ClearImageV.ClearImage() C++: void ClearImage() Clear the image to (0,0,0,0) for each pixel. 
SetZBufferSizeV.SetZBufferSize(int, int) C++: void SetZBufferSize(int, int) V.SetZBufferSize((int, int)) C++: void SetZBufferSize(int a[2]) GetZBufferSizeV.GetZBufferSize() -> (int, int) C++: int *GetZBufferSize() Set / Get the size of the ZBuffer in pixels. The zbuffer will be captured for the region of the screen covered by the ImageInUseSize image. However, due to subsampling, the size of the ImageInUseSize image may be smaller than this ZBuffer image which will be captured at screen resolution. SetZBufferOriginV.SetZBufferOrigin(int, int) C++: void SetZBufferOrigin(int, int) V.SetZBufferOrigin((int, int)) C++: void SetZBufferOrigin(int a[2]) GetZBufferOriginV.GetZBufferOrigin() -> (int, int) C++: int *GetZBufferOrigin() Set / Get the origin of the ZBuffer. This is the distance from the lower left corner of the viewport where the ZBuffer started (multiply the ImageOrigin by the ImageSampleDistance) This is the pixel location on the full resolution viewport where the ZBuffer capture will start. These values are used to convert the (x,y) pixel location within the ImageInUseSize image into a ZBuffer location. SetUseZBufferV.SetUseZBuffer(int) C++: virtual void SetUseZBuffer(int _arg) The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. GetUseZBufferMinValueV.GetUseZBufferMinValue() -> int C++: virtual int GetUseZBufferMinValue() The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. GetUseZBufferMaxValueV.GetUseZBufferMaxValue() -> int C++: virtual int GetUseZBufferMaxValue() The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. GetUseZBufferV.GetUseZBuffer() -> int C++: virtual int GetUseZBuffer() The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. UseZBufferOnV.UseZBufferOn() C++: virtual void UseZBufferOn() The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. UseZBufferOffV.UseZBufferOff() C++: virtual void UseZBufferOff() The UseZBuffer flag indicates whether the ZBuffer is in use. The ZBuffer is captured and used when IntermixIntersectingGeometry is on in the mapper, and when there are props that have been rendered before the current volume. GetZBufferValueV.GetZBufferValue(int, int) -> float C++: float GetZBufferValue(int x, int y) Get the ZBuffer value corresponding to location (x,y) where (x,y) are indexing into the ImageInUse image. This must be converted to the zbuffer image coordinates. Nearest neighbor value is returned. If UseZBuffer is off, then 1.0 is always returned. GetZBufferV.GetZBuffer() -> (float, ...) C++: float *GetZBuffer() Get the ZBuffer. The size of the ZBuffer should be specific with SetZBufferSize, and AllocateZBuffer method should be called before getting the ZBuffer. 
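Although the mapper normally owns and manages this object, the sizing and allocation calls described above can be exercised directly. A brief sketch, with illustrative numbers only (a 640x480 viewport rendered at ImageSampleDistance 2, stored in a power-of-two buffer):

import vtk

ray_image = vtk.vtkFixedPointRayCastImage()

# Whole viewport is 640x480; at ImageSampleDistance 2 only a 320x240 image
# is needed, kept in a 512x256 power-of-two buffer so it can be texture mapped.
ray_image.SetImageViewportSize(640, 480)
ray_image.SetImageInUseSize(320, 240)
ray_image.SetImageMemorySize(512, 256)
ray_image.SetImageOrigin(0, 0)
ray_image.SetImageSampleDistance(2.0)

ray_image.AllocateImage()   # allocates the RGBA unsigned short buffer
ray_image.ClearImage()      # fill every pixel with (0, 0, 0, 0)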
AllocateZBufferV.AllocateZBuffer() C++: void AllocateZBuffer() vtkFixedPointVolumeRayCastCompositeGOHelpervtkRenderingVolumePython.vtkFixedPointVolumeRayCastCompositeGOHelpervtkFixedPointVolumeRayCastCompositeGOHelper - A helper that generates composite images for the volume ray cast mapper Superclass: vtkFixedPointVolumeRayCastHelper This is one of the helper classes for the vtkFixedPointVolumeRayCastMapper. It will generate composite images using an alpha blending operation. This class should not be used directly, it is a helper class for the mapper and has no user-level API. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastCompositeGOHelper C++: static vtkFixedPointVolumeRayCastCompositeGOHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastCompositeGOHelper C++: vtkFixedPointVolumeRayCastCompositeGOHelper *NewInstance() GenerateImageV.GenerateImage(int, int, vtkVolume, vtkFixedPointVolumeRayCastMapper) C++: void GenerateImage(int threadID, int threadCount, vtkVolume *vol, vtkFixedPointVolumeRayCastMapper *mapper) override; vtkFixedPointVolumeRayCastHelpervtkFixedPointVolumeRayCastMappervtkFixedPointVolumeRayCastCompositeGOShadeHelpervtkRenderingVolumePython.vtkFixedPointVolumeRayCastCompositeGOShadeHelpervtkFixedPointVolumeRayCastCompositeGOShadeHelper - A helper that generates composite images for the volume ray cast mapper Superclass: vtkFixedPointVolumeRayCastHelper This is one of the helper classes for the vtkFixedPointVolumeRayCastMapper. It will generate composite images using an alpha blending operation. This class should not be used directly, it is a helper class for the mapper and has no user-level API. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastCompositeGOShadeHelper C++: static vtkFixedPointVolumeRayCastCompositeGOShadeHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastCompositeGOShadeHelper C++: vtkFixedPointVolumeRayCastCompositeGOShadeHelper *NewInstance( ) vtkFixedPointVolumeRayCastCompositeHelpervtkRenderingVolumePython.vtkFixedPointVolumeRayCastCompositeHelpervtkFixedPointVolumeRayCastCompositeHelper - A helper that generates composite images for the volume ray cast mapper Superclass: vtkFixedPointVolumeRayCastHelper This is one of the helper classes for the vtkFixedPointVolumeRayCastMapper. It will generate composite images using an alpha blending operation. This class should not be used directly, it is a helper class for the mapper and has no user-level API. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastCompositeHelper C++: static vtkFixedPointVolumeRayCastCompositeHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastCompositeHelper C++: vtkFixedPointVolumeRayCastCompositeHelper *NewInstance() vtkFixedPointVolumeRayCastCompositeShadeHelpervtkRenderingVolumePython.vtkFixedPointVolumeRayCastCompositeShadeHelpervtkFixedPointVolumeRayCastCompositeShadeHelper - A helper that generates composite images for the volume ray cast mapper Superclass: vtkFixedPointVolumeRayCastHelper This is one of the helper classes for the vtkFixedPointVolumeRayCastMapper. It will generate composite images using an alpha blending operation. This class should not be used directly, it is a helper class for the mapper and has no user-level API. 
@sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastCompositeShadeHelper C++: static vtkFixedPointVolumeRayCastCompositeShadeHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastCompositeShadeHelper C++: vtkFixedPointVolumeRayCastCompositeShadeHelper *NewInstance() vtkRenderingVolumePython.vtkFixedPointVolumeRayCastHelpervtkFixedPointVolumeRayCastHelper - An abstract helper that generates images for the volume ray cast mapper Superclass: vtkObject This is the abstract superclass of all helper classes for the vtkFixedPointVolumeRayCastMapper. This class should not be used directly. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastHelper C++: static vtkFixedPointVolumeRayCastHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastHelper C++: vtkFixedPointVolumeRayCastHelper *NewInstance() V.GenerateImage(int, int, vtkVolume, vtkFixedPointVolumeRayCastMapper) C++: virtual void GenerateImage(int, int, vtkVolume *, vtkFixedPointVolumeRayCastMapper *) vtkFixedPointVolumeRayCastMIPHelpervtkRenderingVolumePython.vtkFixedPointVolumeRayCastMIPHelpervtkFixedPointVolumeRayCastMIPHelper - A helper that generates MIP images for the volume ray cast mapper Superclass: vtkFixedPointVolumeRayCastHelper This is one of the helper classes for the vtkFixedPointVolumeRayCastMapper. It will generate maximum intensity images. This class should not be used directly, it is a helper class for the mapper and has no user-level API. @sa vtkFixedPointVolumeRayCastMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastMIPHelper C++: static vtkFixedPointVolumeRayCastMIPHelper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastMIPHelper C++: vtkFixedPointVolumeRayCastMIPHelper *NewInstance() VTKKW_FP_SHIFTVTKKW_FPMM_SHIFTVTKKW_FP_MASKVTKKW_FP_SCALEvtkRenderingVolumePython.vtkFixedPointVolumeRayCastMappervtkFixedPointVolumeRayCastMapper - A fixed point mapper for volumes Superclass: vtkVolumeMapper This is a software ray caster for rendering volumes in vtkImageData. It works with all input data types and up to four components. It performs composite or MIP rendering, and can be intermixed with geometric data. Space leaping is used to speed up the rendering process. In addition, calculations are performed in 15 bit fixed point precision. This mapper is threaded, and will interleave scan lines across processors. WARNING: This ray caster may not produce consistent results when the number of threads exceeds 1. The class warns if the number of threads > 1. The differences may be subtle. Applications should decide if the trade-off in performance is worth the lack of consistency. Other limitations of this ray caster include that: - it does not do isosurface ray casting - it only does interpolate-before-classify compositing - it only does maximum scalar value MIP This mapper handles all data types from unsigned char through double. However, some of the internal calculations are performed in float and therefore even the full float range may cause problems for this mapper (both in scalar data values and in spacing between samples). Space leaping is performed by creating a sub-sampled volume. 4x4x4 cells in the original volume are represented by a min, max, and combined gradient and flag value. 
The min max volume has three unsigned shorts per 4x4x4 group of cells from the original volume - one reprenting the minimum scalar index (the scalar value adjusted to fit in the 15 bit range), the maximum scalar index, and a third unsigned short which is both the maximum gradient opacity in the neighborhood (an unsigned char) and the flag that is filled in for the current lookup tables to indicate whether this region can be skipped. @sa vtkVolumeMapper V.SafeDownCast(vtkObjectBase) -> vtkFixedPointVolumeRayCastMapper C++: static vtkFixedPointVolumeRayCastMapper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkFixedPointVolumeRayCastMapper C++: vtkFixedPointVolumeRayCastMapper *NewInstance() SetSampleDistanceV.SetSampleDistance(float) C++: virtual void SetSampleDistance(float _arg) Set/Get the distance between samples used for rendering when AutoAdjustSampleDistances is off, or when this mapper has more than 1 second allocated to it for rendering. GetSampleDistanceV.GetSampleDistance() -> float C++: virtual float GetSampleDistance() Set/Get the distance between samples used for rendering when AutoAdjustSampleDistances is off, or when this mapper has more than 1 second allocated to it for rendering. SetInteractiveSampleDistanceV.SetInteractiveSampleDistance(float) C++: virtual void SetInteractiveSampleDistance(float _arg) Set/Get the distance between samples when interactive rendering is happening. In this case, interactive is defined as this volume mapper having less than 1 second allocated for rendering. When AutoAdjustSampleDistance is On, and the allocated render time is less than 1 second, then this InteractiveSampleDistance will be used instead of the SampleDistance above. GetInteractiveSampleDistanceV.GetInteractiveSampleDistance() -> float C++: virtual float GetInteractiveSampleDistance() Set/Get the distance between samples when interactive rendering is happening. In this case, interactive is defined as this volume mapper having less than 1 second allocated for rendering. When AutoAdjustSampleDistance is On, and the allocated render time is less than 1 second, then this InteractiveSampleDistance will be used instead of the SampleDistance above. V.SetImageSampleDistance(float) C++: virtual void SetImageSampleDistance(float _arg) Sampling distance in the XY image dimensions. Default value of 1 meaning 1 ray cast per pixel. If set to 0.5, 4 rays will be cast per pixel. If set to 2.0, 1 ray will be cast for every 4 (2 by 2) pixels. This value will be adjusted to meet a desired frame rate when AutoAdjustSampleDistances is on. GetImageSampleDistanceMinValueV.GetImageSampleDistanceMinValue() -> float C++: virtual float GetImageSampleDistanceMinValue() Sampling distance in the XY image dimensions. Default value of 1 meaning 1 ray cast per pixel. If set to 0.5, 4 rays will be cast per pixel. If set to 2.0, 1 ray will be cast for every 4 (2 by 2) pixels. This value will be adjusted to meet a desired frame rate when AutoAdjustSampleDistances is on. GetImageSampleDistanceMaxValueV.GetImageSampleDistanceMaxValue() -> float C++: virtual float GetImageSampleDistanceMaxValue() Sampling distance in the XY image dimensions. Default value of 1 meaning 1 ray cast per pixel. If set to 0.5, 4 rays will be cast per pixel. If set to 2.0, 1 ray will be cast for every 4 (2 by 2) pixels. This value will be adjusted to meet a desired frame rate when AutoAdjustSampleDistances is on. 
V.GetImageSampleDistance() -> float C++: virtual float GetImageSampleDistance() Sampling distance in the XY image dimensions. The default value of 1 means 1 ray is cast per pixel. If set to 0.5, 4 rays will be cast per pixel. If set to 2.0, 1 ray will be cast for every 4 (2 by 2) pixels. This value will be adjusted to meet a desired frame rate when AutoAdjustSampleDistances is on. SetMinimumImageSampleDistanceV.SetMinimumImageSampleDistance(float) C++: virtual void SetMinimumImageSampleDistance(float _arg) This is the minimum image sample distance allowed when the image sample distance is being automatically adjusted. GetMinimumImageSampleDistanceMinValueV.GetMinimumImageSampleDistanceMinValue() -> float C++: virtual float GetMinimumImageSampleDistanceMinValue() This is the minimum image sample distance allowed when the image sample distance is being automatically adjusted. GetMinimumImageSampleDistanceMaxValueV.GetMinimumImageSampleDistanceMaxValue() -> float C++: virtual float GetMinimumImageSampleDistanceMaxValue() This is the minimum image sample distance allowed when the image sample distance is being automatically adjusted. GetMinimumImageSampleDistanceV.GetMinimumImageSampleDistance() -> float C++: virtual float GetMinimumImageSampleDistance() This is the minimum image sample distance allowed when the image sample distance is being automatically adjusted. SetMaximumImageSampleDistanceV.SetMaximumImageSampleDistance(float) C++: virtual void SetMaximumImageSampleDistance(float _arg) This is the maximum image sample distance allowed when the image sample distance is being automatically adjusted. GetMaximumImageSampleDistanceMinValueV.GetMaximumImageSampleDistanceMinValue() -> float C++: virtual float GetMaximumImageSampleDistanceMinValue() This is the maximum image sample distance allowed when the image sample distance is being automatically adjusted. GetMaximumImageSampleDistanceMaxValueV.GetMaximumImageSampleDistanceMaxValue() -> float C++: virtual float GetMaximumImageSampleDistanceMaxValue() This is the maximum image sample distance allowed when the image sample distance is being automatically adjusted. GetMaximumImageSampleDistanceV.GetMaximumImageSampleDistance() -> float C++: virtual float GetMaximumImageSampleDistance() This is the maximum image sample distance allowed when the image sample distance is being automatically adjusted. SetAutoAdjustSampleDistancesV.SetAutoAdjustSampleDistances(int) C++: virtual void SetAutoAdjustSampleDistances(int _arg) If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). GetAutoAdjustSampleDistancesMinValueV.GetAutoAdjustSampleDistancesMinValue() -> int C++: virtual int GetAutoAdjustSampleDistancesMinValue() If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). 
GetAutoAdjustSampleDistancesMaxValueV.GetAutoAdjustSampleDistancesMaxValue() -> int C++: virtual int GetAutoAdjustSampleDistancesMaxValue() If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). GetAutoAdjustSampleDistancesV.GetAutoAdjustSampleDistances() -> int C++: virtual int GetAutoAdjustSampleDistances() If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). AutoAdjustSampleDistancesOnV.AutoAdjustSampleDistancesOn() C++: virtual void AutoAdjustSampleDistancesOn() If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). AutoAdjustSampleDistancesOffV.AutoAdjustSampleDistancesOff() C++: virtual void AutoAdjustSampleDistancesOff() If AutoAdjustSampleDistances is on, the ImageSampleDistance and the SampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). If this is an interactive render (more than 1 frame per second) the SampleDistance will be increased, otherwise it will not be altered (a binary decision, as opposed to the ImageSampleDistance which will vary continuously). SetLockSampleDistanceToInputSpacingV.SetLockSampleDistanceToInputSpacing(int) C++: virtual void SetLockSampleDistanceToInputSpacing(int _arg) Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when the number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and, if different from 1, will affect the sample distance. GetLockSampleDistanceToInputSpacingMinValueV.GetLockSampleDistanceToInputSpacingMinValue() -> int C++: virtual int GetLockSampleDistanceToInputSpacingMinValue() Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when the number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and, if different from 1, will affect the sample distance. 
GetLockSampleDistanceToInputSpacingMaxValueV.GetLockSampleDistanceToInputSpacingMaxValue() -> int C++: virtual int GetLockSampleDistanceToInputSpacingMaxValue() Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and if different than 1, will effect the sample distance. GetLockSampleDistanceToInputSpacingV.GetLockSampleDistanceToInputSpacing() -> int C++: virtual int GetLockSampleDistanceToInputSpacing() Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and if different than 1, will effect the sample distance. LockSampleDistanceToInputSpacingOnV.LockSampleDistanceToInputSpacingOn() C++: virtual void LockSampleDistanceToInputSpacingOn() Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and if different than 1, will effect the sample distance. LockSampleDistanceToInputSpacingOffV.LockSampleDistanceToInputSpacingOff() C++: virtual void LockSampleDistanceToInputSpacingOff() Automatically compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3) until it reaches 1/2 average voxel size when number of voxels is 1E6. Note that ScalarOpacityUnitDistance is still taken into account and if different than 1, will effect the sample distance. V.SetNumberOfThreads(int) C++: void SetNumberOfThreads(int num) Set/Get the number of threads to use. This by default is equal to the number of available processors detected. WARNING: If number of threads > 1, results may not be consistent. V.GetNumberOfThreads() -> int C++: int GetNumberOfThreads() Set/Get the number of threads to use. This by default is equal to the number of available processors detected. WARNING: If number of threads > 1, results may not be consistent. SetIntermixIntersectingGeometryV.SetIntermixIntersectingGeometry(int) C++: virtual void SetIntermixIntersectingGeometry(int _arg) If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. GetIntermixIntersectingGeometryMinValueV.GetIntermixIntersectingGeometryMinValue() -> int C++: virtual int GetIntermixIntersectingGeometryMinValue() If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. GetIntermixIntersectingGeometryMaxValueV.GetIntermixIntersectingGeometryMaxValue() -> int C++: virtual int GetIntermixIntersectingGeometryMaxValue() If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. 
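Taken together, the sampling, threading, and geometry-intermixing switches above are typically configured on the mapper before the first render. A minimal sketch, assuming the standard vtk package; all concrete values are illustrative only:

import vtk

mapper = vtk.vtkFixedPointVolumeRayCastMapper()

# Fixed sampling along the ray (world units) when auto adjustment is off.
mapper.AutoAdjustSampleDistancesOff()
mapper.SetSampleDistance(0.5)
mapper.SetInteractiveSampleDistance(2.0)

# Or derive the sample distance from the input spacing instead:
# mapper.LockSampleDistanceToInputSpacingOn()

# One ray per pixel; the min/max bound what auto adjustment may pick.
mapper.SetImageSampleDistance(1.0)
mapper.SetMinimumImageSampleDistance(1.0)
mapper.SetMaximumImageSampleDistance(10.0)

# The warning above notes results may differ slightly when threads > 1.
mapper.SetNumberOfThreads(1)

# Capture the z-buffer so rays stop at previously rendered geometry.
mapper.IntermixIntersectingGeometryOn()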
GetIntermixIntersectingGeometryV.GetIntermixIntersectingGeometry() -> int C++: virtual int GetIntermixIntersectingGeometry() If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. IntermixIntersectingGeometryOnV.IntermixIntersectingGeometryOn() C++: virtual void IntermixIntersectingGeometryOn() If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. IntermixIntersectingGeometryOffV.IntermixIntersectingGeometryOff() C++: virtual void IntermixIntersectingGeometryOff() If IntermixIntersectingGeometry is turned on, the zbuffer will be captured and used to limit the traversal of the rays. ComputeRequiredImageSampleDistanceV.ComputeRequiredImageSampleDistance(float, vtkRenderer) -> float C++: float ComputeRequiredImageSampleDistance(float desiredTime, vtkRenderer *ren) V.ComputeRequiredImageSampleDistance(float, vtkRenderer, vtkVolume) -> float C++: float ComputeRequiredImageSampleDistance(float desiredTime, vtkRenderer *ren, vtkVolume *vol) What is the image sample distance required to achieve the desired time? A version of this method is provided that does not require the volume argument since if you are using an LODProp3D you may not know this information. If you use this version you must be certain that the ray cast mapper is only used for one volume (and not shared among multiple volumes) RenderV.Render(vtkRenderer, vtkVolume) C++: void Render(vtkRenderer *, vtkVolume *) override; WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE Initialize rendering for this volume. ToFixedPointPositionV.ToFixedPointPosition(float) -> int C++: unsigned int ToFixedPointPosition(float val) V.ToFixedPointPosition([float, float, float], [int, int, int]) C++: void ToFixedPointPosition(float in[3], unsigned int out[3]) ToFixedPointDirectionV.ToFixedPointDirection(float) -> int C++: unsigned int ToFixedPointDirection(float dir) V.ToFixedPointDirection([float, float, float], [int, int, int]) C++: void ToFixedPointDirection(float in[3], unsigned int out[3]) FixedPointIncrementV.FixedPointIncrement([int, int, int], [int, int, int]) C++: void FixedPointIncrement(unsigned int position[3], unsigned int increment[3]) GetFloatTripleFromPointerV.GetFloatTripleFromPointer([float, float, float], [float, ...]) C++: void GetFloatTripleFromPointer(float v[3], float *ptr) GetUIntTripleFromPointerV.GetUIntTripleFromPointer([int, int, int], [int, ...]) C++: void GetUIntTripleFromPointer(unsigned int v[3], unsigned int *ptr) ShiftVectorDownV.ShiftVectorDown([int, int, int], [int, int, int]) C++: void ShiftVectorDown(unsigned int in[3], unsigned int out[3]) CheckMinMaxVolumeFlagV.CheckMinMaxVolumeFlag([int, int, int], int) -> int C++: int CheckMinMaxVolumeFlag(unsigned int pos[3], int c) CheckMIPMinMaxVolumeFlagV.CheckMIPMinMaxVolumeFlag([int, int, int], int, int, int) -> int C++: int CheckMIPMinMaxVolumeFlag(unsigned int pos[3], int c, unsigned short maxIdx, int flip) LookupColorUCV.LookupColorUC([int, ...], [int, ...], int, [int, int, int, int]) C++: void LookupColorUC(unsigned short *colorTable, unsigned short *scalarOpacityTable, unsigned short index, unsigned char color[4]) LookupDependentColorUCV.LookupDependentColorUC([int, ...], [int, ...], [int, int, int, int], int, [int, int, int, int]) C++: void LookupDependentColorUC(unsigned short *colorTable, unsigned short *scalarOpacityTable, unsigned short index[4], int components, unsigned char color[4]) CheckIfCroppedV.CheckIfCropped([int, int, 
int]) -> int C++: int CheckIfCropped(unsigned int pos[3]) GetRenderWindowV.GetRenderWindow() -> vtkRenderWindow C++: virtual vtkRenderWindow *GetRenderWindow() GetMIPHelperV.GetMIPHelper() -> vtkFixedPointVolumeRayCastMIPHelper C++: virtual vtkFixedPointVolumeRayCastMIPHelper *GetMIPHelper() GetCompositeHelperV.GetCompositeHelper() -> vtkFixedPointVolumeRayCastCompositeHelper C++: virtual vtkFixedPointVolumeRayCastCompositeHelper *GetCompositeHelper( ) GetCompositeGOHelperV.GetCompositeGOHelper() -> vtkFixedPointVolumeRayCastCompositeGOHelper C++: virtual vtkFixedPointVolumeRayCastCompositeGOHelper *GetCompositeGOHelper( ) GetCompositeGOShadeHelperV.GetCompositeGOShadeHelper() -> vtkFixedPointVolumeRayCastCompositeGOShadeHelper C++: virtual vtkFixedPointVolumeRayCastCompositeGOShadeHelper *GetCompositeGOShadeHelper( ) GetCompositeShadeHelperV.GetCompositeShadeHelper() -> vtkFixedPointVolumeRayCastCompositeShadeHelper C++: virtual vtkFixedPointVolumeRayCastCompositeShadeHelper *GetCompositeShadeHelper( ) GetTableShiftV.GetTableShift() -> (float, float, float, float) C++: float *GetTableShift() GetTableScaleV.GetTableScale() -> (float, float, float, float) C++: float *GetTableScale() GetShadingRequiredV.GetShadingRequired() -> int C++: virtual int GetShadingRequired() GetGradientOpacityRequiredV.GetGradientOpacityRequired() -> int C++: virtual int GetGradientOpacityRequired() GetCurrentScalarsV.GetCurrentScalars() -> vtkDataArray C++: virtual vtkDataArray *GetCurrentScalars() GetPreviousScalarsV.GetPreviousScalars() -> vtkDataArray C++: virtual vtkDataArray *GetPreviousScalars() GetRowBoundsV.GetRowBounds() -> (int, ...) C++: int *GetRowBounds() GetColorTableV.GetColorTable(int) -> (int, ...) C++: unsigned short *GetColorTable(int c) GetScalarOpacityTableV.GetScalarOpacityTable(int) -> (int, ...) C++: unsigned short *GetScalarOpacityTable(int c) GetGradientOpacityTableV.GetGradientOpacityTable(int) -> (int, ...) C++: unsigned short *GetGradientOpacityTable(int c) GetVolumeV.GetVolume() -> vtkVolume C++: vtkVolume *GetVolume() GetDiffuseShadingTableV.GetDiffuseShadingTable(int) -> (int, ...) C++: unsigned short *GetDiffuseShadingTable(int c) GetSpecularShadingTableV.GetSpecularShadingTable(int) -> (int, ...) C++: unsigned short *GetSpecularShadingTable(int c) ComputeRayInfoV.ComputeRayInfo(int, int, [int, int, int], [int, int, int], [int, ...]) C++: void ComputeRayInfo(int x, int y, unsigned int pos[3], unsigned int dir[3], unsigned int *numSteps) InitializeRayInfoV.InitializeRayInfo(vtkVolume) C++: void InitializeRayInfo(vtkVolume *vol) ShouldUseNearestNeighborInterpolationV.ShouldUseNearestNeighborInterpolation(vtkVolume) -> int C++: int ShouldUseNearestNeighborInterpolation(vtkVolume *vol) SetRayCastImageV.SetRayCastImage(vtkFixedPointRayCastImage) C++: void SetRayCastImage(vtkFixedPointRayCastImage *) Set / Get the underlying image object. One will be automatically created - only need to set it when using from an AMR mapper which renders multiple times into the same image. GetRayCastImageV.GetRayCastImage() -> vtkFixedPointRayCastImage C++: virtual vtkFixedPointRayCastImage *GetRayCastImage() Set / Get the underlying image object. One will be automatically created - only need to set it when using from an AMR mapper which renders multiple times into the same image. 
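The SetRayCastImage / GetRayCastImage pair above only needs to be called explicitly when several mappers must composite into one image, as in the AMR use case mentioned. A minimal sketch of the wiring, assuming the standard vtk package:

import vtk

# One vtkFixedPointRayCastImage shared by two fixed point mappers.
shared_image = vtk.vtkFixedPointRayCastImage()

mapper_a = vtk.vtkFixedPointVolumeRayCastMapper()
mapper_b = vtk.vtkFixedPointVolumeRayCastMapper()
mapper_a.SetRayCastImage(shared_image)
mapper_b.SetRayCastImage(shared_image)

# Each mapper now reports the shared image instead of creating its own.
print(mapper_a.GetRayCastImage())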
PerImageInitializationV.PerImageInitialization(vtkRenderer, vtkVolume, int, [float, ...], [float, ...], [int, ...]) -> int C++: int PerImageInitialization(vtkRenderer *, vtkVolume *, int, double *, double *, int *) PerVolumeInitializationV.PerVolumeInitialization(vtkRenderer, vtkVolume) C++: void PerVolumeInitialization(vtkRenderer *, vtkVolume *) PerSubVolumeInitializationV.PerSubVolumeInitialization(vtkRenderer, vtkVolume, int) C++: void PerSubVolumeInitialization(vtkRenderer *, vtkVolume *, int) RenderSubVolumeV.RenderSubVolume() C++: void RenderSubVolume() DisplayRenderedImageV.DisplayRenderedImage(vtkRenderer, vtkVolume) C++: void DisplayRenderedImage(vtkRenderer *, vtkVolume *) AbortRenderV.AbortRender() C++: void AbortRender() CreateCanonicalViewV.CreateCanonicalView(vtkVolume, vtkImageData, int, [float, float, float], [float, float, float]) C++: void CreateCanonicalView(vtkVolume *volume, vtkImageData *image, int blend_mode, double viewDirection[3], double viewUp[3]) GetEstimatedRenderTimeV.GetEstimatedRenderTime(vtkRenderer, vtkVolume) -> float C++: float GetEstimatedRenderTime(vtkRenderer *ren, vtkVolume *vol) V.GetEstimatedRenderTime(vtkRenderer) -> float C++: float GetEstimatedRenderTime(vtkRenderer *ren) Get an estimate of the rendering time for a given volume / renderer. Only valid if this mapper has been used to render that volume for that renderer previously. Estimate is good when the viewing parameters have not changed much since that last render. SetFinalColorWindowV.SetFinalColorWindow(float) C++: virtual void SetFinalColorWindow(float _arg) Set/Get the window / level applied to the final color. This allows brightness / contrast adjustments on the final image. window is the width of the window. level is the center of the window. Initial window value is 1.0 Initial level value is 0.5 window cannot be null but can be negative, this way values will be reversed. |window| can be larger than 1.0 level can be any real value. GetFinalColorWindowV.GetFinalColorWindow() -> float C++: virtual float GetFinalColorWindow() Set/Get the window / level applied to the final color. This allows brightness / contrast adjustments on the final image. window is the width of the window. level is the center of the window. Initial window value is 1.0 Initial level value is 0.5 window cannot be null but can be negative, this way values will be reversed. |window| can be larger than 1.0 level can be any real value. SetFinalColorLevelV.SetFinalColorLevel(float) C++: virtual void SetFinalColorLevel(float _arg) Set/Get the window / level applied to the final color. This allows brightness / contrast adjustments on the final image. window is the width of the window. level is the center of the window. Initial window value is 1.0 Initial level value is 0.5 window cannot be null but can be negative, this way values will be reversed. |window| can be larger than 1.0 level can be any real value. GetFinalColorLevelV.GetFinalColorLevel() -> float C++: virtual float GetFinalColorLevel() Set/Get the window / level applied to the final color. This allows brightness / contrast adjustments on the final image. window is the width of the window. level is the center of the window. Initial window value is 1.0 Initial level value is 0.5 window cannot be null but can be negative, this way values will be reversed. |window| can be larger than 1.0 level can be any real value. 
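The final color window / level controls above, together with the sampling parameters, are enough for a small end-to-end example. The sketch below uses vtkRTAnalyticSource purely as stand-in data (its scalar range is roughly 37 to 277); the transfer-function breakpoints and the window value are illustrative assumptions, not defaults of this class.

import vtk

source = vtk.vtkRTAnalyticSource()          # synthetic "wavelet" volume

mapper = vtk.vtkFixedPointVolumeRayCastMapper()
mapper.SetInputConnection(source.GetOutputPort())

# Brightness / contrast on the final image; defaults are window 1.0, level 0.5.
mapper.SetFinalColorWindow(0.8)
mapper.SetFinalColorLevel(0.5)

# Simple grayscale transfer functions spanning the wavelet's approximate range.
color = vtk.vtkColorTransferFunction()
color.AddRGBPoint(37.0, 0.0, 0.0, 0.0)
color.AddRGBPoint(277.0, 1.0, 1.0, 1.0)
opacity = vtk.vtkPiecewiseFunction()
opacity.AddPoint(37.0, 0.0)
opacity.AddPoint(277.0, 0.6)

prop = vtk.vtkVolumeProperty()
prop.SetColor(color)
prop.SetScalarOpacity(opacity)
prop.SetInterpolationTypeToLinear()

volume = vtk.vtkVolume()
volume.SetMapper(mapper)
volume.SetProperty(prop)

renderer = vtk.vtkRenderer()
renderer.AddVolume(volume)
window = vtk.vtkRenderWindow()
window.AddRenderer(renderer)
window.Render()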
GetFlipMIPComparisonV.GetFlipMIPComparison() -> int C++: virtual int GetFlipMIPComparison() ReleaseGraphicsResourcesV.ReleaseGraphicsResources(vtkWindow) C++: void ReleaseGraphicsResources(vtkWindow *) override; WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE Release any graphics resources that are being consumed by this mapper. The parameter window could be used to determine which graphic resources to release. vtkVolumeMappervtkAbstractVolumeMappervtkAbstractMapper3DvtkAbstractMappervtkAlgorithmvtkWindow(i)vtkGPUVolumeRayCastMapperTFRangeTypeBinaryMaskTypeLabelMapMaskTypeSCALARNATIVEvtkRenderingVolumePython.vtkGPUVolumeRayCastMapper.TFRangeTypevtkRenderingVolumePython.vtkGPUVolumeRayCastMappervtkGPUVolumeRayCastMapper - Ray casting performed on the GPU. Superclass: vtkVolumeMapper vtkGPUVolumeRayCastMapper is a volume mapper that performs ray casting on the GPU using fragment programs. V.SafeDownCast(vtkObjectBase) -> vtkGPUVolumeRayCastMapper C++: static vtkGPUVolumeRayCastMapper *SafeDownCast( vtkObjectBase *o) V.NewInstance() -> vtkGPUVolumeRayCastMapper C++: vtkGPUVolumeRayCastMapper *NewInstance() V.SetAutoAdjustSampleDistances(int) C++: virtual void SetAutoAdjustSampleDistances(int _arg) If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.GetAutoAdjustSampleDistancesMinValue() -> int C++: virtual int GetAutoAdjustSampleDistancesMinValue() If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.GetAutoAdjustSampleDistancesMaxValue() -> int C++: virtual int GetAutoAdjustSampleDistancesMaxValue() If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.GetAutoAdjustSampleDistances() -> int C++: virtual int GetAutoAdjustSampleDistances() If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.AutoAdjustSampleDistancesOn() C++: virtual void AutoAdjustSampleDistancesOn() If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.AutoAdjustSampleDistancesOff() C++: virtual void AutoAdjustSampleDistancesOff() If AutoAdjustSampleDistances is on, the the ImageSampleDistance will be varied to achieve the allocated render time of this prop (controlled by the desired update rate and any culling in use). V.SetLockSampleDistanceToInputSpacing(int) C++: virtual void SetLockSampleDistanceToInputSpacing(int _arg) Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. V.GetLockSampleDistanceToInputSpacingMinValue() -> int C++: virtual int GetLockSampleDistanceToInputSpacingMinValue() Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. 
V.GetLockSampleDistanceToInputSpacingMaxValue() -> int C++: virtual int GetLockSampleDistanceToInputSpacingMaxValue() Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. V.GetLockSampleDistanceToInputSpacing() -> int C++: virtual int GetLockSampleDistanceToInputSpacing() Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. V.LockSampleDistanceToInputSpacingOn() C++: virtual void LockSampleDistanceToInputSpacingOn() Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. V.LockSampleDistanceToInputSpacingOff() C++: virtual void LockSampleDistanceToInputSpacingOff() Compute the sample distance from the data spacing. When the number of voxels is 8, the sample distance will be roughly 1/200 the average voxel size. The distance will grow proportionally to numVoxels^(1/3). Off by default. SetUseJitteringV.SetUseJittering(int) C++: virtual void SetUseJittering(int _arg) If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. GetUseJitteringMinValueV.GetUseJitteringMinValue() -> int C++: virtual int GetUseJitteringMinValue() If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. GetUseJitteringMaxValueV.GetUseJitteringMaxValue() -> int C++: virtual int GetUseJitteringMaxValue() If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. GetUseJitteringV.GetUseJittering() -> int C++: virtual int GetUseJittering() If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. UseJitteringOnV.UseJitteringOn() C++: virtual void UseJitteringOn() If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. UseJitteringOffV.UseJitteringOff() C++: virtual void UseJitteringOff() If UseJittering is on, each ray traversal direction will be perturbed slightly using a noise-texture to get rid of wood-grain effect. SetUseDepthPassV.SetUseDepthPass(int) C++: virtual void SetUseDepthPass(int _arg) If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. GetUseDepthPassMinValueV.GetUseDepthPassMinValue() -> int C++: virtual int GetUseDepthPassMinValue() If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. 
UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. GetUseDepthPassMaxValueV.GetUseDepthPassMaxValue() -> int C++: virtual int GetUseDepthPassMaxValue() If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. GetUseDepthPassV.GetUseDepthPass() -> int C++: virtual int GetUseDepthPass() If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. UseDepthPassOnV.UseDepthPassOn() C++: virtual void UseDepthPassOn() If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. UseDepthPassOffV.UseDepthPassOff() C++: virtual void UseDepthPassOff() If UseDepthPass is on, the mapper will use two passes. In the first pass, an isocontour depth buffer will be utilized as starting point for ray-casting hence eliminating traversal on voxels that are not going to participate in final rendering. UseDepthPass requires reasonable contour values to be set which can be set by calling GetDepthPassContourValues() method and using vtkControurValues API. GetDepthPassContourValuesV.GetDepthPassContourValues() -> vtkContourValues C++: vtkContourValues *GetDepthPassContourValues() Return handle to contour values container so that values can be set by the application. Contour values will be used only when UseDepthPass is on. V.SetSampleDistance(float) C++: virtual void SetSampleDistance(float _arg) Set/Get the distance between samples used for rendering when AutoAdjustSampleDistances is off, or when this mapper has more than 1 second allocated to it for rendering. Initial value is 1.0. V.GetSampleDistance() -> float C++: virtual float GetSampleDistance() Set/Get the distance between samples used for rendering when AutoAdjustSampleDistances is off, or when this mapper has more than 1 second allocated to it for rendering. Initial value is 1.0. SetMaxMemoryInBytesV.SetMaxMemoryInBytes(int) C++: virtual void SetMaxMemoryInBytes(vtkIdType _arg) Maximum size of the 3D texture in GPU memory. Will default to the size computed from the graphics card. Can be adjusted by the user. GetMaxMemoryInBytesV.GetMaxMemoryInBytes() -> int C++: virtual vtkIdType GetMaxMemoryInBytes() Maximum size of the 3D texture in GPU memory. Will default to the size computed from the graphics card. Can be adjusted by the user. SetMaxMemoryFractionV.SetMaxMemoryFraction(float) C++: virtual void SetMaxMemoryFraction(float _arg) Maximum fraction of the MaxMemoryInBytes that should be used to hold the texture. Valid values are 0.1 to 1.0. 
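Jittering, the optional depth pass, and the GPU memory caps described above are all plain switches on the mapper. A sketch with placeholder values; the 100.0 isovalue and the 256 MiB cap are assumptions that would have to be tuned to the actual data and hardware:

import vtk

gpu_mapper = vtk.vtkGPUVolumeRayCastMapper()

# Perturb ray traversal with a noise texture to hide wood-grain artifacts.
gpu_mapper.UseJitteringOn()

# Two-pass rendering: an isocontour depth pass first, then ray casting.
# The contour values container is owned by the mapper; 100.0 is a placeholder.
gpu_mapper.UseDepthPassOn()
gpu_mapper.GetDepthPassContourValues().SetValue(0, 100.0)

# Fixed along-ray sampling when auto adjustment is off.
gpu_mapper.AutoAdjustSampleDistancesOff()
gpu_mapper.SetSampleDistance(0.5)

# Cap GPU texture memory at about 256 MiB and use at most 90% of that cap.
gpu_mapper.SetMaxMemoryInBytes(256 * 1024 * 1024)
gpu_mapper.SetMaxMemoryFraction(0.9)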
GetMaxMemoryFractionMinValueV.GetMaxMemoryFractionMinValue() -> float C++: virtual float GetMaxMemoryFractionMinValue() Maximum fraction of the MaxMemoryInBytes that should be used to hold the texture. Valid values are 0.1 to 1.0. GetMaxMemoryFractionMaxValueV.GetMaxMemoryFractionMaxValue() -> float C++: virtual float GetMaxMemoryFractionMaxValue() Maximum fraction of the MaxMemoryInBytes that should be used to hold the texture. Valid values are 0.1 to 1.0. GetMaxMemoryFractionV.GetMaxMemoryFraction() -> float C++: virtual float GetMaxMemoryFraction() Maximum fraction of the MaxMemoryInBytes that should be used to hold the texture. Valid values are 0.1 to 1.0. SetReportProgressV.SetReportProgress(bool) C++: virtual void SetReportProgress(bool _arg) Tells if the mapper will report intermediate progress. Initial value is true. As the progress works with a GL blocking call (glFinish()), this can be useful for huge dataset but can slow down rendering of small dataset. It should be set to true for big dataset or complex shading and streaming but to false for small datasets. GetReportProgressV.GetReportProgress() -> bool C++: virtual bool GetReportProgress() Tells if the mapper will report intermediate progress. Initial value is true. As the progress works with a GL blocking call (glFinish()), this can be useful for huge dataset but can slow down rendering of small dataset. It should be set to true for big dataset or complex shading and streaming but to false for small datasets. IsRenderSupportedV.IsRenderSupported(vtkRenderWindow, vtkVolumeProperty) -> int C++: virtual int IsRenderSupported(vtkRenderWindow *window, vtkVolumeProperty *property) Based on hardware and properties, we may or may not be able to render using 3D texture mapping. This indicates if 3D texture mapping is supported by the hardware, and if the other extensions necessary to support the specific properties are available. V.CreateCanonicalView(vtkRenderer, vtkVolume, vtkImageData, int, [float, float, float], [float, float, float]) C++: void CreateCanonicalView(vtkRenderer *ren, vtkVolume *volume, vtkImageData *image, int blend_mode, double viewDirection[3], double viewUp[3]) SetMaskInputV.SetMaskInput(vtkImageData) C++: void SetMaskInput(vtkImageData *mask) Optionally, set a mask input. This mask may be a binary mask or a label map. This must be specified via SetMaskType. * If the mask is a binary mask, the volume rendering is confined to regions * within the binary mask. The binary mask is assumed to have a datatype of * UCHAR and values of 255 (inside) and 0 (outside). * The mask may also be a label map. The label map is allowed to contain only * 3 labels (values of 0, 1 and 2) and must have a datatype of UCHAR. In voxels * with label value of 0, the color transfer function supplied by component * 0 is used. * In voxels with label value of 1, the color transfer function supplied by * component 1 is used and blended with the transfer function supplied by * component 0, with the blending weight being determined by * MaskBlendFactor. * In voxels with a label value of 2, the color transfer function supplied * by component 2 is used and blended with the transfer function supplied by * component 0, with the blending weight being determined by * MaskBlendFactor. GetMaskInputV.GetMaskInput() -> vtkImageData C++: virtual vtkImageData *GetMaskInput() Optionally, set a mask input. This mask may be a binary mask or a label map. This must be specified via SetMaskType. 
* If the mask is a binary mask, the volume rendering is confined to regions * within the binary mask. The binary mask is assumed to have a datatype of * UCHAR and values of 255 (inside) and 0 (outside). * The mask may also be a label map. The label map is allowed to contain only * 3 labels (values of 0, 1 and 2) and must have a datatype of UCHAR. In voxels * with label value of 0, the color transfer function supplied by component * 0 is used. * In voxels with label value of 1, the color transfer function supplied by * component 1 is used and blended with the transfer function supplied by * component 0, with the blending weight being determined by * MaskBlendFactor. * In voxels with a label value of 2, the color transfer function supplied * by component 2 is used and blended with the transfer function supplied by * component 0, with the blending weight being determined by * MaskBlendFactor. SetMaskTypeV.SetMaskType(int) C++: virtual void SetMaskType(int _arg) Set the mask type, if mask is to be used. See documentation for SetMaskInput(). The default is a LabelMapMaskType. GetMaskTypeV.GetMaskType() -> int C++: virtual int GetMaskType() Set the mask type, if mask is to be used. See documentation for SetMaskInput(). The default is a LabelMapMaskType. SetMaskTypeToBinaryV.SetMaskTypeToBinary() C++: void SetMaskTypeToBinary() Set the mask type, if mask is to be used. See documentation for SetMaskInput(). The default is a LabelMapMaskType. SetMaskTypeToLabelMapV.SetMaskTypeToLabelMap() C++: void SetMaskTypeToLabelMap() Set the mask type, if mask is to be used. See documentation for SetMaskInput(). The default is a LabelMapMaskType. SetMaskBlendFactorV.SetMaskBlendFactor(float) C++: virtual void SetMaskBlendFactor(float _arg) Tells how much mask color transfer function is used compared to the standard color transfer function when the mask is true. This is relevant only for the label map mask. 0.0 means only standard color transfer function. 1.0 means only mask color transfer function. The default value is 1.0. GetMaskBlendFactorMinValueV.GetMaskBlendFactorMinValue() -> float C++: virtual float GetMaskBlendFactorMinValue() Tells how much mask color transfer function is used compared to the standard color transfer function when the mask is true. This is relevant only for the label map mask. 0.0 means only standard color transfer function. 1.0 means only mask color transfer function. The default value is 1.0. GetMaskBlendFactorMaxValueV.GetMaskBlendFactorMaxValue() -> float C++: virtual float GetMaskBlendFactorMaxValue() Tells how much mask color transfer function is used compared to the standard color transfer function when the mask is true. This is relevant only for the label map mask. 0.0 means only standard color transfer function. 1.0 means only mask color transfer function. The default value is 1.0. GetMaskBlendFactorV.GetMaskBlendFactor() -> float C++: virtual float GetMaskBlendFactor() Tells how much mask color transfer function is used compared to the standard color transfer function when the mask is true. This is relevant only for the label map mask. 0.0 means only standard color transfer function. 1.0 means only mask color transfer function. The default value is 1.0. SetRenderToImageV.SetRenderToImage(int) C++: virtual void SetRenderToImage(int _arg) Enable or disable setting output of volume rendering to be color and depth textures. By default this is set to 0 (off). It should be noted that it is possible that underlying API specific mapper may not supoport RenderToImage mode. 
\warning \li This method ignores any other volumes / props in the scene. \li This method does not respect the general attributes of the scene i.e. background color, etc. It always produces a color image that has a transparent white background outside the bounds of the volume. * \sa GetDepthImage(), GetColorImage() GetRenderToImageV.GetRenderToImage() -> int C++: virtual int GetRenderToImage() Enable or disable setting output of volume rendering to be color and depth textures. By default this is set to 0 (off). It should be noted that it is possible that underlying API specific mapper may not support RenderToImage mode. \warning \li This method ignores any other volumes / props in the scene. \li This method does not respect the general attributes of the scene i.e. background color, etc. It always produces a color image that has a transparent white background outside the bounds of the volume. * \sa GetDepthImage(), GetColorImage() RenderToImageOnV.RenderToImageOn() C++: virtual void RenderToImageOn() Enable or disable setting output of volume rendering to be color and depth textures. By default this is set to 0 (off). It should be noted that it is possible that underlying API specific mapper may not support RenderToImage mode. \warning \li This method ignores any other volumes / props in the scene. \li This method does not respect the general attributes of the scene i.e. background color, etc. It always produces a color image that has a transparent white background outside the bounds of the volume. * \sa GetDepthImage(), GetColorImage() RenderToImageOffV.RenderToImageOff() C++: virtual void RenderToImageOff() Enable or disable setting output of volume rendering to be color and depth textures. By default this is set to 0 (off). It should be noted that it is possible that underlying API specific mapper may not support RenderToImage mode. \warning \li This method ignores any other volumes / props in the scene. \li This method does not respect the general attributes of the scene i.e. background color, etc. It always produces a color image that has a transparent white background outside the bounds of the volume. * \sa GetDepthImage(), GetColorImage() SetDepthImageScalarTypeV.SetDepthImageScalarType(int) C++: virtual void SetDepthImageScalarType(int _arg) Set/Get the scalar type of the depth texture in RenderToImage mode. By default, the type is VTK_FLOAT. \sa SetRenderToImage() GetDepthImageScalarTypeV.GetDepthImageScalarType() -> int C++: virtual int GetDepthImageScalarType() Set/Get the scalar type of the depth texture in RenderToImage mode. By default, the type is VTK_FLOAT. \sa SetRenderToImage() SetDepthImageScalarTypeToUnsignedCharV.SetDepthImageScalarTypeToUnsignedChar() C++: void SetDepthImageScalarTypeToUnsignedChar() Set/Get the scalar type of the depth texture in RenderToImage mode. By default, the type is VTK_FLOAT. \sa SetRenderToImage() SetDepthImageScalarTypeToUnsignedShortV.SetDepthImageScalarTypeToUnsignedShort() C++: void SetDepthImageScalarTypeToUnsignedShort() Set/Get the scalar type of the depth texture in RenderToImage mode. By default, the type is VTK_FLOAT. \sa SetRenderToImage() SetDepthImageScalarTypeToFloatV.SetDepthImageScalarTypeToFloat() C++: void SetDepthImageScalarTypeToFloat() Set/Get the scalar type of the depth texture in RenderToImage mode. By default, the type is VTK_FLOAT. 
\sa SetRenderToImage() SetClampDepthToBackfaceV.SetClampDepthToBackface(int) C++: virtual void SetClampDepthToBackface(int _arg) Enable or disable clamping the depth value of the fully transparent voxel to the depth of the back-face of the volume. This parameter is used when RenderToImage mode is enabled. When ClampDepthToBackFace is false, the fully transparent voxels will have a value of 1.0 in the depth image. When this is true, the fully transparent voxels will have the depth value of the face at which the ray exits the volume. By default, this is set to 0 (off). \sa SetRenderToImage(), GetDepthImage() GetClampDepthToBackfaceV.GetClampDepthToBackface() -> int C++: virtual int GetClampDepthToBackface() Enable or disable clamping the depth value of the fully transparent voxel to the depth of the back-face of the volume. This parameter is used when RenderToImage mode is enabled. When ClampDepthToBackFace is false, the fully transparent voxels will have a value of 1.0 in the depth image. When this is true, the fully transparent voxels will have the depth value of the face at which the ray exits the volume. By default, this is set to 0 (off). \sa SetRenderToImage(), GetDepthImage() ClampDepthToBackfaceOnV.ClampDepthToBackfaceOn() C++: virtual void ClampDepthToBackfaceOn() Enable or disable clamping the depth value of the fully transparent voxel to the depth of the back-face of the volume. This parameter is used when RenderToImage mode is enabled. When ClampDepthToBackFace is false, the fully transparent voxels will have a value of 1.0 in the depth image. When this is true, the fully transparent voxels will have the depth value of the face at which the ray exits the volume. By default, this is set to 0 (off). \sa SetRenderToImage(), GetDepthImage() ClampDepthToBackfaceOffV.ClampDepthToBackfaceOff() C++: virtual void ClampDepthToBackfaceOff() Enable or disable clamping the depth value of the fully transparent voxel to the depth of the back-face of the volume. This parameter is used when RenderToImage mode is enabled. When ClampDepthToBackFace is false, the fully transparent voxels will have a value of 1.0 in the depth image. When this is true, the fully transparent voxels will have the depth value of the face at which the ray exits the volume. By default, this is set to 0 (off). \sa SetRenderToImage(), GetDepthImage() GetDepthImageV.GetDepthImage(vtkImageData) C++: virtual void GetDepthImage(vtkImageData *) Low level API to export the depth texture as vtkImageData in RenderToImage mode. Should be implemented by the graphics API specific mapper (GL or other). \sa SetRenderToImage() GetColorImageV.GetColorImage(vtkImageData) C++: virtual void GetColorImage(vtkImageData *) Low level API to export the color texture as vtkImageData in RenderToImage mode. Should be implemented by the graphics API specific mapper (GL or other). \sa SetRenderToImage() V.Render(vtkRenderer, vtkVolume) C++: void Render(vtkRenderer *, vtkVolume *) override; Initialize rendering for this volume. \warning INTERNAL METHOD - NOT INTENDED FOR GENERAL USE GPURenderV.GPURender(vtkRenderer, vtkVolume) C++: virtual void GPURender(vtkRenderer *, vtkVolume *) Handled in the subclass - the actual render method \pre input is up-to-date. V.ReleaseGraphicsResources(vtkWindow) C++: void ReleaseGraphicsResources(vtkWindow *) override; Release any graphics resources that are being consumed by this mapper. The parameter window could be used to determine which graphic resources to release. 
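A sketch of RenderToImage mode, assuming vtkGPUVolumeRayCastMapper as the API-specific mapper that implements GetColorImage()/GetDepthImage(); the analytic source and the transfer-function breakpoints are illustrative placeholders, and an OpenGL2-capable context is assumed to be available for the off-screen render.

import vtk

source = vtk.vtkRTAnalyticSource()
source.Update()

mapper = vtk.vtkGPUVolumeRayCastMapper()
mapper.SetInputConnection(source.GetOutputPort())
mapper.RenderToImageOn()                        # render into color + depth textures
mapper.SetDepthImageScalarTypeToUnsignedChar()  # the default would be VTK_FLOAT
mapper.ClampDepthToBackfaceOn()                 # transparent voxels take the exit-face depth

otf = vtk.vtkPiecewiseFunction()
otf.AddPoint(0.0, 0.0)
otf.AddPoint(255.0, 1.0)
ctf = vtk.vtkColorTransferFunction()
ctf.AddRGBPoint(0.0, 0.0, 0.0, 0.0)
ctf.AddRGBPoint(255.0, 1.0, 1.0, 1.0)
prop = vtk.vtkVolumeProperty()
prop.SetScalarOpacity(otf)
prop.SetColor(ctf)

volume = vtk.vtkVolume()
volume.SetMapper(mapper)
volume.SetProperty(prop)

renderer = vtk.vtkRenderer()
renderer.AddVolume(volume)
renderer.ResetCamera()
window = vtk.vtkRenderWindow()
window.SetOffScreenRendering(True)
window.AddRenderer(renderer)
window.Render()

color = vtk.vtkImageData()
depth = vtk.vtkImageData()
mapper.GetColorImage(color)   # RGBA image of the volume only (transparent white outside)
mapper.GetDepthImage(depth)   # depth texture exported as vtkImageData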
GetReductionRatio  V.GetReductionRatio([float, float, float])  C++: virtual void GetReductionRatio(double ratio[3])
Return how much the dataset has to be reduced in each dimension to fit on the GPU. If the value is 1.0, there is no need to reduce the dataset. The computation is based on hardware limits (3D texture indexable size) and MaxMemoryInBytes.
\pre the calling thread has a current OpenGL context.
\pre mapper_supported: IsRenderSupported(renderer->GetRenderWindow(),0)
\post valid_i_ratio: ratio[0]>0 && ratio[0]<=1.0
\post valid_j_ratio: ratio[1]>0 && ratio[1]<=1.0
\post valid_k_ratio: ratio[2]>0 && ratio[2]<=1.0

SetColorRangeType  V.SetColorRangeType(int)  C++: virtual void SetColorRangeType(int _arg)
Set whether to use the scalar range or the native transfer function range when looking up transfer functions for color and opacity values. When the range is set to TransferFunctionRange::SCALAR, the function is distributed over the entire scalar range. If it is set to TransferFunctionRange::NATIVE, scalar values outside the native transfer function range will be truncated to the native range. By default, the volume scalar range is used.
\note The native range of the transfer function is the range returned by vtkColorTransferFunction::GetRange() or vtkPiecewiseFunction::GetRange().
\note There is no special API provided for 2D transfer functions, considering that they are set as a pre-generated vtkImageData on this class, i.e. the range is already encoded.

GetColorRangeType  V.GetColorRangeType() -> int  C++: virtual int GetColorRangeType()
See SetColorRangeType().

SetScalarOpacityRangeType  V.SetScalarOpacityRangeType(int)  C++: virtual void SetScalarOpacityRangeType(int _arg)
See SetColorRangeType(); this selects the range used for the scalar opacity transfer function.

GetScalarOpacityRangeType  V.GetScalarOpacityRangeType() -> int  C++: virtual int GetScalarOpacityRangeType()
See SetScalarOpacityRangeType().

SetGradientOpacityRangeType  V.SetGradientOpacityRangeType(int)  C++: virtual void SetGradientOpacityRangeType(int _arg)
See SetColorRangeType(); this selects the range used for the gradient opacity transfer function.

GetGradientOpacityRangeType  V.GetGradientOpacityRangeType() -> int  C++: virtual int GetGradientOpacityRangeType()
See SetGradientOpacityRangeType().
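A small sketch of switching the transfer-function range handling described above, assuming the range-type setters are exposed by vtkGPUVolumeRayCastMapper and that SCALAR/NATIVE map to the ordinals 0 and 1 in the Python wrapping (an assumption of this sketch, not something stated by the docstrings).

import vtk

mapper = vtk.vtkGPUVolumeRayCastMapper()
# Distribute the functions over the scalar range of the data (the documented default) ...
mapper.SetColorRangeType(0)            # 0 assumed to be TransferFunctionRange::SCALAR
# ... or clamp lookups to the native range of the transfer functions themselves.
mapper.SetScalarOpacityRangeType(1)    # 1 assumed to be TransferFunctionRange::NATIVE
mapper.SetGradientOpacityRangeType(1)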
vtkRenderingVolumePython.vtkOSPRayVolumeInterface
vtkOSPRayVolumeInterface - Removes link dependence on optional ospray module.
Superclass: vtkVolumeMapper
Class allows SmartVolume to use OSPRay for rendering when OSPRay is enabled. When disabled, this class does nothing but return a warning.

V.SafeDownCast(vtkObjectBase) -> vtkOSPRayVolumeInterface  C++: static vtkOSPRayVolumeInterface *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkOSPRayVolumeInterface  C++: vtkOSPRayVolumeInterface *NewInstance()

V.Render(vtkRenderer, vtkVolume)  C++: void Render(vtkRenderer *, vtkVolume *) override;
Overridden to warn about the lack of OSPRay if not overridden.

vtkRenderingVolumePython.vtkProjectedTetrahedraMapper
vtkProjectedTetrahedraMapper - Unstructured grid volume renderer.
Superclass: vtkUnstructuredGridVolumeMapper
vtkProjectedTetrahedraMapper is an implementation of the classic Projected Tetrahedra algorithm presented by Shirley and Tuchman in "A Polygonal Approximation to Direct Scalar Volume Rendering" in Computer Graphics, December 1990.
@bug This mapper relies heavily on the implementation of the OpenGL pipeline. A typical hardware driver has lots of options, and some settings can cause this mapper to produce artifacts.

V.SafeDownCast(vtkObjectBase) -> vtkProjectedTetrahedraMapper  C++: static vtkProjectedTetrahedraMapper *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkProjectedTetrahedraMapper  C++: vtkProjectedTetrahedraMapper *NewInstance()

SetVisibilitySort  V.SetVisibilitySort(vtkVisibilitySort)  C++: virtual void SetVisibilitySort(vtkVisibilitySort *sort)

GetVisibilitySort  V.GetVisibilitySort() -> vtkVisibilitySort  C++: virtual vtkVisibilitySort *GetVisibilitySort()

MapScalarsToColors  V.MapScalarsToColors(vtkDataArray, vtkVolumeProperty, vtkDataArray)  C++: static void MapScalarsToColors(vtkDataArray *colors, vtkVolumeProperty *property, vtkDataArray *scalars)

TransformPoints  V.TransformPoints(vtkPoints, (float, ...) [16 values], (float, ...) [16 values], vtkFloatArray)  C++: static void TransformPoints(vtkPoints *inPoints, const float projection_mat[16], const float modelview_mat[16], vtkFloatArray *outPoints)

IsSupported  V.IsSupported(vtkRenderWindow) -> bool  C++: virtual bool IsSupported(vtkRenderWindow *)
Return true if the rendering context provides the necessary functionality to use this class.
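An illustrative use of vtkProjectedTetrahedraMapper: the input is tetrahedralized with vtkDataSetTriangleFilter, and an explicit visibility sort is assigned. The reader, the file name, and the choice of vtkCellCenterDepthSort as the concrete sort are assumptions of this sketch; New() on the mapper is expected to resolve to a device-specific subclass through the object factory.

import vtk

reader = vtk.vtkUnstructuredGridReader()   # placeholder source of a vtkUnstructuredGrid
reader.SetFileName("ugrid.vtk")            # placeholder file name

tets = vtk.vtkDataSetTriangleFilter()      # ensure an all-tetrahedra input
tets.SetInputConnection(reader.GetOutputPort())

mapper = vtk.vtkProjectedTetrahedraMapper()
mapper.SetInputConnection(tets.GetOutputPort())

sorter = vtk.vtkCellCenterDepthSort()      # assumed concrete vtkVisibilitySort
mapper.SetVisibilitySort(sorter)

window = vtk.vtkRenderWindow()
if not mapper.IsSupported(window):
    print("Rendering context lacks the functionality this mapper needs")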
vtkRenderingVolumePython.vtkRayCastImageDisplayHelper
vtkRayCastImageDisplayHelper - helper class that draws the image to the screen
Superclass: vtkObject
This is a helper class for drawing images created from ray casting on the screen. This is the abstract, device-independent superclass.
@sa vtkUnstructuredGridVolumeRayCastMapper vtkOpenGLRayCastImageDisplayHelper

V.SafeDownCast(vtkObjectBase) -> vtkRayCastImageDisplayHelper  C++: static vtkRayCastImageDisplayHelper *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkRayCastImageDisplayHelper  C++: vtkRayCastImageDisplayHelper *NewInstance()

RenderTexture  V.RenderTexture(vtkVolume, vtkRenderer, [int, int], [int, int], [int, int], [int, int], float, [int, ...])  C++: virtual void RenderTexture(vtkVolume *vol, vtkRenderer *ren, int imageMemorySize[2], int imageViewportSize[2], int imageInUseSize[2], int imageOrigin[2], float requestedDepth, unsigned char *image)

V.RenderTexture(vtkVolume, vtkRenderer, [int, int], [int, int], [int, int], [int, int], float, [int, ...])  C++: virtual void RenderTexture(vtkVolume *vol, vtkRenderer *ren, int imageMemorySize[2], int imageViewportSize[2], int imageInUseSize[2], int imageOrigin[2], float requestedDepth, unsigned short *image)

V.RenderTexture(vtkVolume, vtkRenderer, vtkFixedPointRayCastImage, float)  C++: virtual void RenderTexture(vtkVolume *vol, vtkRenderer *ren, vtkFixedPointRayCastImage *image, float requestedDepth)

SetPreMultipliedColors  V.SetPreMultipliedColors(int)  C++: virtual void SetPreMultipliedColors(int _arg)

GetPreMultipliedColorsMinValue  V.GetPreMultipliedColorsMinValue() -> int  C++: virtual int GetPreMultipliedColorsMinValue()

GetPreMultipliedColorsMaxValue  V.GetPreMultipliedColorsMaxValue() -> int  C++: virtual int GetPreMultipliedColorsMaxValue()

GetPreMultipliedColors  V.GetPreMultipliedColors() -> int  C++: virtual int GetPreMultipliedColors()

PreMultipliedColorsOn  V.PreMultipliedColorsOn()  C++: virtual void PreMultipliedColorsOn()

PreMultipliedColorsOff  V.PreMultipliedColorsOff()  C++: virtual void PreMultipliedColorsOff()

SetPixelScale  V.SetPixelScale(float)  C++: virtual void SetPixelScale(float _arg)
Set/Get the pixel scale to be applied to the image before display. Can be set to scale the incoming pixel values - for example the fixed-point mapper uses the unsigned short API but with 15-bit values, so it needs a scale of 2.0.

GetPixelScale  V.GetPixelScale() -> float  C++: virtual float GetPixelScale()
See SetPixelScale().

V.ReleaseGraphicsResources(vtkWindow)  C++: virtual void ReleaseGraphicsResources(vtkWindow *)
Derived classes should implement this if needed.

vtkRenderingVolumePython.vtkRecursiveSphereDirectionEncoder
vtkRecursiveSphereDirectionEncoder - A direction encoder based on the recursive subdivision of an octahedron
Superclass: vtkDirectionEncoder
vtkRecursiveSphereDirectionEncoder is a direction encoder which uses the vertices of a recursive subdivision of an octahedron (with the vertices pushed out onto the surface of an enclosing sphere) to encode directions into a two-byte value.
@sa vtkDirectionEncoder

V.SafeDownCast(vtkObjectBase) -> vtkRecursiveSphereDirectionEncoder  C++: static vtkRecursiveSphereDirectionEncoder *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkRecursiveSphereDirectionEncoder  C++: vtkRecursiveSphereDirectionEncoder *NewInstance()

V.GetEncodedDirection([float, float, float]) -> int  C++: int GetEncodedDirection(float n[3]) override;
Given a normal vector n, return the encoded direction.

V.GetDecodedGradient(int) -> (float, float, float)  C++: float *GetDecodedGradient(int value) override;
Given an encoded value, return a pointer to the normal vector.

V.GetNumberOfEncodedDirections() -> int  C++: int GetNumberOfEncodedDirections(void) override;
Return the number of encoded directions.

V.GetDecodedGradientTable() -> (float, ...)  C++: float *GetDecodedGradientTable(void) override;
Get the decoded gradient table. There are this->GetNumberOfEncodedDirections() entries in the table, each containing a normal (direction) vector. This is a flat structure - 3 times the number of directions floats in an array.

SetRecursionDepth  V.SetRecursionDepth(int)  C++: virtual void SetRecursionDepth(int _arg)
Set/Get the recursion depth for the subdivision. This indicates how many times one triangle on the initial 8-sided sphere model is replaced by four triangles formed by connecting triangle edge midpoints. A recursion level of 0 yields 8 triangles with 6 unique vertices. The normals are the vectors from the sphere center through the vertices. The number of directions will be 11, since the four normals with 0 z values will be duplicated in the table - once with +0 values and once with -0 values - and an additional index will be used to represent the (0,0,0) normal. If we instead choose a recursion level of 6 (the maximum that can fit within 2 bytes), the number of directions is 16643, with 16386 unique directions and a zero normal.

GetRecursionDepthMinValue  V.GetRecursionDepthMinValue() -> int  C++: virtual int GetRecursionDepthMinValue()
See SetRecursionDepth().

GetRecursionDepthMaxValue  V.GetRecursionDepthMaxValue() -> int  C++: virtual int GetRecursionDepthMaxValue()
See SetRecursionDepth().

GetRecursionDepth  V.GetRecursionDepth() -> int  C++: virtual int GetRecursionDepth()
See SetRecursionDepth().

vtkRenderingVolumePython.vtkSphericalDirectionEncoder
vtkSphericalDirectionEncoder - A direction encoder based on spherical coordinates
Superclass: vtkDirectionEncoder
vtkSphericalDirectionEncoder is a direction encoder which uses spherical coordinates for mapping (nx, ny, nz) into an azimuth, elevation pair.
@sa vtkDirectionEncoder

V.SafeDownCast(vtkObjectBase) -> vtkSphericalDirectionEncoder  C++: static vtkSphericalDirectionEncoder *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkSphericalDirectionEncoder  C++: vtkSphericalDirectionEncoder *NewInstance()
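A short sketch exercising the recursive-sphere encoder on its own (encoders are normally attached to an encoded-gradient estimator, which is outside the scope of this snippet). The numbers in the comments restate the documentation above.

import vtk

encoder = vtk.vtkRecursiveSphereDirectionEncoder()
encoder.SetRecursionDepth(6)                           # maximum that fits in two bytes
print(encoder.GetNumberOfEncodedDirections())          # 16643 at depth 6, per the docs above
code = encoder.GetEncodedDirection([0.0, 0.0, 1.0])    # encode the +Z direction
print(encoder.GetDecodedGradient(code))                # decoded back to a unit vector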
VTK_BUNYKRCF_MAX_ARRAYS  VTK_BUNYKRCF_ARRAY_SIZE
vtkRenderingVolumePython.vtkUnstructuredGridBunykRayCastFunction
vtkUnstructuredGridBunykRayCastFunction - a concrete ray casting function for unstructured grid data
Superclass: vtkUnstructuredGridVolumeRayCastFunction
vtkUnstructuredGridBunykRayCastFunction is a concrete implementation of a ray cast function for unstructured grid data. This class was based on the paper "Simple, Fast, Robust Ray Casting of Irregular Grids" by Paul Bunyk, Arie Kaufman, and Claudio Silva. This method is quite memory intensive (with extra explicit copies of the data) and therefore should not be used for very large data. This method assumes that the input data is composed entirely of tetras - use vtkDataSetTriangleFilter before setting the input on the mapper. A usage sketch is shown after the integrator entries below.
The basic idea of this method is as follows:
1) Enumerate the triangles. At each triangle keep space for some information that will be used during rendering. This includes which tetra the triangle belongs to, the plane equation, and the barycentric coefficients.
2) Keep a reference to all four triangles for each tetra.
3) At the beginning of each render, do the precomputation. This includes creating an array of transformed points (in view coordinates) and computing the view-dependent info per triangle (plane equations and barycentric coords in view space).
4) Find all front-facing boundary triangles (a triangle is on the boundary if it belongs to only one tetra). For each triangle, find all pixels in the image that intersect the triangle, and add this to the sorted (by depth) intersection list at each pixel.
5) For each ray cast, traverse the intersection list. At each intersection, accumulate opacity and color contribution per tetra along the ray until you reach an exiting triangle (on the boundary).
@sa vtkUnstructuredGridVolumeRayCastMapper

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridBunykRayCastFunction  C++: static vtkUnstructuredGridBunykRayCastFunction *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridBunykRayCastFunction  C++: vtkUnstructuredGridBunykRayCastFunction *NewInstance()

Initialize  V.Initialize(vtkRenderer, vtkVolume)  C++: void Initialize(vtkRenderer *ren, vtkVolume *vol) override;
Called by the ray cast mapper at the start of rendering.

Finalize  V.Finalize()  C++: void Finalize() override;
Called by the ray cast mapper at the end of rendering.

NewIterator  V.NewIterator() -> vtkUnstructuredGridVolumeRayCastIterator  C++: vtkUnstructuredGridVolumeRayCastIterator *NewIterator() override;
Returns a new object that will iterate over all the intersections of a ray with the cells of the input. The calling code is responsible for deleting the returned object.

GetPoints  V.GetPoints() -> (float, ...)  C++: double *GetPoints()
Access to an internal structure for the templated method.

GetViewToWorldMatrix  V.GetViewToWorldMatrix() -> vtkMatrix4x4  C++: virtual vtkMatrix4x4 *GetViewToWorldMatrix()
Access to an internal structure for the templated method.

V.GetImageOrigin() -> (int, int)  C++: int *GetImageOrigin()
Access to an internal structure for the templated method.

V.GetImageViewportSize() -> (int, int)  C++: int *GetImageViewportSize()
Access to an internal structure for the templated method.

vtkRenderingVolumePython.vtkUnstructuredGridHomogeneousRayIntegrator
vtkUnstructuredGridHomogeneousRayIntegrator - performs piecewise constant ray integration.
Superclass: vtkUnstructuredGridVolumeRayIntegrator
vtkUnstructuredGridHomogeneousRayIntegrator performs homogeneous ray integration. This is a good method to use when volume rendering scalars that are defined on cells.

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridHomogeneousRayIntegrator  C++: static vtkUnstructuredGridHomogeneousRayIntegrator *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridHomogeneousRayIntegrator  C++: vtkUnstructuredGridHomogeneousRayIntegrator *NewInstance()

V.Initialize(vtkVolume, vtkDataArray)  C++: void Initialize(vtkVolume *volume, vtkDataArray *scalars) override;
Set up the integrator with the given properties and scalars.

Integrate  V.Integrate(vtkDoubleArray, vtkDataArray, vtkDataArray, [float, float, float, float])  C++: void Integrate(vtkDoubleArray *intersectionLengths, vtkDataArray *nearIntersections, vtkDataArray *farIntersections, float color[4]) override;
Given a set of intersections (defined by the three arrays), compute the piecewise integration of the array in front-to-back order. \c intersectionLengths holds the length of each piecewise segment. \c nearIntersections and \c farIntersections hold the scalar values at the front and back of each segment. \c color should contain the RGBA value of the volume in front of the segments passed in, and the result will be placed back into \c color.

SetTransferFunctionTableSize  V.SetTransferFunctionTableSize(int)  C++: virtual void SetTransferFunctionTableSize(int _arg)
For quick lookup, the transfer function is sampled into a table. This parameter sets how big a table to use. By default, 1024 entries are used.

GetTransferFunctionTableSize  V.GetTransferFunctionTableSize() -> int  C++: virtual int GetTransferFunctionTableSize()
See SetTransferFunctionTableSize().
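The usage sketch referenced above: wiring the Bunyk ray cast function into the software unstructured-grid ray caster. The input is tetrahedralized first, as the Bunyk documentation requires, and the homogeneous integrator is chosen because it suits cell-centered scalars. The reader and file name are placeholders.

import vtk

reader = vtk.vtkUnstructuredGridReader()
reader.SetFileName("ugrid.vtk")            # placeholder file name

tets = vtk.vtkDataSetTriangleFilter()      # Bunyk requires an all-tetrahedra input
tets.SetInputConnection(reader.GetOutputPort())

mapper = vtk.vtkUnstructuredGridVolumeRayCastMapper()
mapper.SetInputConnection(tets.GetOutputPort())
mapper.SetRayCastFunction(vtk.vtkUnstructuredGridBunykRayCastFunction())
mapper.SetRayIntegrator(vtk.vtkUnstructuredGridHomogeneousRayIntegrator())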
vtkRenderingVolumePython.vtkUnstructuredGridLinearRayIntegrator
vtkUnstructuredGridLinearRayIntegrator - performs piecewise linear ray integration.
Superclass: vtkUnstructuredGridVolumeRayIntegrator
vtkUnstructuredGridLinearRayIntegrator performs piecewise linear ray integration. Considering that transfer functions in VTK are piecewise linear, this class should give the "correct" integration under most circumstances. However, the computations performed are fairly hefty and should, for the most part, only be used as a benchmark for other, faster methods.
@sa vtkUnstructuredGridPartialPreIntegration

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridLinearRayIntegrator  C++: static vtkUnstructuredGridLinearRayIntegrator *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridLinearRayIntegrator  C++: vtkUnstructuredGridLinearRayIntegrator *NewInstance()

IntegrateRay  V.IntegrateRay(float, float, float, float, float, [float, float, float, float])  C++: static void IntegrateRay(double length, double intensity_front, double attenuation_front, double intensity_back, double attenuation_back, float color[4])

V.IntegrateRay(float, (float, float, float), float, (float, float, float), float, [float, float, float, float])  C++: static void IntegrateRay(double length, const double color_front[3], double attenuation_front, const double color_back[3], double attenuation_back, float color[4])
Integrates a single ray segment. color is blended with the result (with color in front). The result is written back into color.

Psi  V.Psi(float, float, float) -> float  C++: static float Psi(float length, float attenuation_front, float attenuation_back)
Computes Psi (as defined by Moreland and Angel, "A Fast High Accuracy Volume Renderer for Unstructured Data").

vtkRenderingVolumePython.vtkUnstructuredGridPartialPreIntegration
vtkUnstructuredGridPartialPreIntegration - performs piecewise linear ray integration.
Superclass: vtkUnstructuredGridVolumeRayIntegrator
vtkUnstructuredGridPartialPreIntegration performs piecewise linear ray integration. This will give the same results as vtkUnstructuredGridLinearRayIntegrator (with potentially an error due to table lookup quantization), but should be notably faster. The algorithm used is given by Moreland and Angel, "A Fast High Accuracy Volume Renderer for Unstructured Data." This class is thread safe only after the first instance is created.

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridPartialPreIntegration  C++: static vtkUnstructuredGridPartialPreIntegration *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridPartialPreIntegration  C++: vtkUnstructuredGridPartialPreIntegration *NewInstance()

V.Psi(float, float) -> float  C++: static float Psi(float taufD, float taubD)
Looks up Psi (as defined by Moreland and Angel, "A Fast High Accuracy Volume Renderer for Unstructured Data") in a table. The table must be created first, which happens on the first instantiation of this class or when BuildPsiTable is first called.

GetPsiTable  V.GetPsiTable(int) -> (float, ...)  C++: static float *GetPsiTable(int &size)
See Psi(); returns the Psi lookup table itself and its size.

BuildPsiTable  V.BuildPsiTable()  C++: static void BuildPsiTable()
See Psi(); builds the Psi lookup table, which otherwise happens on the first instantiation of this class.
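A short sketch selecting the partial pre-integration integrator. BuildPsiTable() is called explicitly here only to make the one-time table construction visible; as noted above it also runs implicitly on first use.

import vtk

integrator = vtk.vtkUnstructuredGridPartialPreIntegration()
integrator.BuildPsiTable()                 # one-time Psi lookup table construction

mapper = vtk.vtkUnstructuredGridVolumeRayCastMapper()
mapper.SetRayIntegrator(integrator)        # otherwise a default integrator is assigned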
vtkRenderingVolumePython.vtkUnstructuredGridPreIntegration
vtkUnstructuredGridPreIntegration - performs ray integration with pre-integration tables.
Superclass: vtkUnstructuredGridVolumeRayIntegrator
vtkUnstructuredGridPreIntegration performs ray integration by looking into a precomputed table. The result should be equivalent to that computed by vtkUnstructuredGridLinearRayIntegrator and vtkUnstructuredGridPartialPreIntegration, but faster than either one. The pre-integration algorithm was first introduced by Roettger, Kraus, and Ertl in "Hardware-Accelerated Volume And Isosurface Rendering Based On Cell-Projection." Due to table size limitations, a table can only be indexed by independent scalars. Thus, dependent scalars are not supported.

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridPreIntegration  C++: static vtkUnstructuredGridPreIntegration *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridPreIntegration  C++: vtkUnstructuredGridPreIntegration *NewInstance()

GetIntegrator  V.GetIntegrator() -> vtkUnstructuredGridVolumeRayIntegrator  C++: virtual vtkUnstructuredGridVolumeRayIntegrator *GetIntegrator()
The class used to fill the pre-integration table. By default, a vtkUnstructuredGridPartialPreIntegration is built.

SetIntegrator  V.SetIntegrator(vtkUnstructuredGridVolumeRayIntegrator)  C++: virtual void SetIntegrator(vtkUnstructuredGridVolumeRayIntegrator *)
See GetIntegrator().

SetIntegrationTableScalarResolution  V.SetIntegrationTableScalarResolution(int)  C++: virtual void SetIntegrationTableScalarResolution(int _arg)
Set/Get the size of the integration table built.

GetIntegrationTableScalarResolution  V.GetIntegrationTableScalarResolution() -> int  C++: virtual int GetIntegrationTableScalarResolution()
Set/Get the size of the integration table built.

SetIntegrationTableLengthResolution  V.SetIntegrationTableLengthResolution(int)  C++: virtual void SetIntegrationTableLengthResolution(int _arg)
Set/Get the size of the integration table built.

GetIntegrationTableLengthResolution  V.GetIntegrationTableLengthResolution() -> int  C++: virtual int GetIntegrationTableLengthResolution()
Set/Get the size of the integration table built.

GetIntegrationTableScalarShift  V.GetIntegrationTableScalarShift(int) -> float  C++: virtual double GetIntegrationTableScalarShift(int component=0)
Get how an integration table is indexed.

GetIntegrationTableScalarScale  V.GetIntegrationTableScalarScale(int) -> float  C++: virtual double GetIntegrationTableScalarScale(int component=0)
Get how an integration table is indexed.

GetIntegrationTableLengthScale  V.GetIntegrationTableLengthScale() -> float  C++: virtual double GetIntegrationTableLengthScale()
Get how an integration table is indexed.

GetIncrementalPreIntegration  V.GetIncrementalPreIntegration() -> int  C++: virtual int GetIncrementalPreIntegration()
Get/set whether to use incremental pre-integration (on by default). Incremental pre-integration is much faster but can introduce error due to numerical imprecision. Under most circumstances, the error is not noticeable.
SetIncrementalPreIntegration  V.SetIncrementalPreIntegration(int)  C++: virtual void SetIncrementalPreIntegration(int _arg)
See GetIncrementalPreIntegration().

IncrementalPreIntegrationOn  V.IncrementalPreIntegrationOn()  C++: virtual void IncrementalPreIntegrationOn()
See GetIncrementalPreIntegration().

IncrementalPreIntegrationOff  V.IncrementalPreIntegrationOff()  C++: virtual void IncrementalPreIntegrationOff()
See GetIncrementalPreIntegration().

GetPreIntegrationTable  V.GetPreIntegrationTable(int) -> (float, ...)  C++: virtual float *GetPreIntegrationTable(int component=0)
Get the partial pre-integration table for the given scalar component. The tables are built when Initialize is called. A segment of length d with a front scalar of sf and a back scalar of sb is referenced in the resulting table as 4 * ((d * \c IntegrationTableLengthScale) * \c IntegrationTableScalarResolution * \c IntegrationTableScalarResolution + (sb * \c IntegrationTableScalarScale + \c IntegrationTableScalarShift) * \c IntegrationTableScalarResolution + (sf * \c IntegrationTableScalarScale + \c IntegrationTableScalarShift)).

GetTableEntry  V.GetTableEntry(float, float, float, int) -> (float, ...)  C++: float *GetTableEntry(double scalar_front, double scalar_back, double length, int component=0)
Get an entry (RGBA) in one of the pre-integration tables. The tables are built when Initialize is called.

GetIndexedTableEntry  V.GetIndexedTableEntry(int, int, int, int) -> (float, ...)  C++: float *GetIndexedTableEntry(int scalar_front_index, int scalar_back_index, int length_index, int component=0)
Like GetTableEntry, except the inputs are scaled indices into the table rather than the actual scalar and length values. Use GetTableEntry unless you are really sure you know what you are doing.
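A sketch of configuring vtkUnstructuredGridPreIntegration together with a restatement of the flat table indexing documented above. The table_index helper is a hypothetical illustration for this snippet, not a method of the class, and the integer truncation of the scaled values is an assumption.

import vtk

integrator = vtk.vtkUnstructuredGridPreIntegration()
integrator.SetIntegrationTableScalarResolution(128)
integrator.SetIntegrationTableLengthResolution(256)
integrator.IncrementalPreIntegrationOff()   # slower, but avoids incremental round-off error

def table_index(sf, sb, d, res, scalar_scale, scalar_shift, length_scale):
    # Flat RGBA index of a segment with front scalar sf, back scalar sb, length d,
    # following the formula in the GetPreIntegrationTable() documentation above.
    return 4 * (int(d * length_scale) * res * res
                + int(sb * scalar_scale + scalar_shift) * res
                + int(sf * scalar_scale + scalar_shift))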
COMPOSITE_BLEND  MAXIMUM_INTENSITY_BLEND
vtkRenderingVolumePython.vtkUnstructuredGridVolumeMapper
vtkUnstructuredGridVolumeMapper - Abstract class for an unstructured grid volume mapper
Superclass: vtkAbstractVolumeMapper
vtkUnstructuredGridVolumeMapper is the abstract definition of a volume mapper for unstructured data (vtkUnstructuredGrid). Several basic types of volume mappers are supported as subclasses.
@sa vtkUnstructuredGridVolumeRayCastMapper

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeMapper  C++: static vtkUnstructuredGridVolumeMapper *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeMapper  C++: vtkUnstructuredGridVolumeMapper *NewInstance()

V.SetInputData(vtkUnstructuredGridBase)  C++: virtual void SetInputData(vtkUnstructuredGridBase *)
V.SetInputData(vtkDataSet)  C++: virtual void SetInputData(vtkDataSet *)
Set/Get the input data.

GetInput  V.GetInput() -> vtkUnstructuredGridBase  C++: vtkUnstructuredGridBase *GetInput()
Set/Get the input data.

SetBlendMode  V.SetBlendMode(int)  C++: virtual void SetBlendMode(int _arg)

SetBlendModeToComposite  V.SetBlendModeToComposite()  C++: void SetBlendModeToComposite()

SetBlendModeToMaximumIntensity  V.SetBlendModeToMaximumIntensity()  C++: void SetBlendModeToMaximumIntensity()

GetBlendMode  V.GetBlendMode() -> int  C++: virtual int GetBlendMode()

V.Render(vtkRenderer, vtkVolume)  C++: void Render(vtkRenderer *ren, vtkVolume *vol) override = 0;
Render the volume. WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE. DO NOT USE THIS METHOD OUTSIDE OF THE RENDERING PROCESS.

vtkRenderingVolumePython.vtkUnstructuredGridVolumeRayCastFunction
vtkUnstructuredGridVolumeRayCastFunction - a superclass for ray casting functions
Superclass: vtkObject
vtkUnstructuredGridVolumeRayCastFunction is a superclass for ray casting functions that can be used within a vtkUnstructuredGridVolumeRayCastMapper.
@sa vtkUnstructuredGridVolumeRayCastMapper vtkUnstructuredGridVolumeRayIntegrator

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeRayCastFunction  C++: static vtkUnstructuredGridVolumeRayCastFunction *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeRayCastFunction  C++: vtkUnstructuredGridVolumeRayCastFunction *NewInstance()

V.Initialize(vtkRenderer, vtkVolume)  C++: virtual void Initialize(vtkRenderer *ren, vtkVolume *vol)

V.Finalize()  C++: virtual void Finalize()

V.NewIterator() -> vtkUnstructuredGridVolumeRayCastIterator  C++: virtual vtkUnstructuredGridVolumeRayCastIterator *NewIterator()
Returns a new object that will iterate over all the intersections of a ray with the cells of the input. The calling code is responsible for deleting the returned object.

vtkRenderingVolumePython.vtkUnstructuredGridVolumeRayCastIterator
vtkUnstructuredGridVolumeRayCastIterator - a superclass for iterating over the intersections of a viewing ray with a group of unstructured cells
Superclass: vtkObject
These iterators are created with a vtkUnstructuredGridVolumeRayCastFunction.
@sa vtkUnstructuredGridVolumeRayCastFunction

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeRayCastIterator  C++: static vtkUnstructuredGridVolumeRayCastIterator *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeRayCastIterator  C++: vtkUnstructuredGridVolumeRayCastIterator *NewInstance()

V.Initialize(int, int)  C++: virtual void Initialize(int x, int y)
Initializes the iteration to the start of the ray at the given screen coordinates.
GetNextIntersections  V.GetNextIntersections(vtkIdList, vtkDoubleArray, vtkDataArray, vtkDataArray, vtkDataArray) -> int  C++: virtual vtkIdType GetNextIntersections(vtkIdList *intersectedCells, vtkDoubleArray *intersectionLengths, vtkDataArray *scalars, vtkDataArray *nearIntersections, vtkDataArray *farIntersections)
Get the intersections of the next several cells. The cell ids are stored in intersectedCells and the length of each ray segment within the cell is stored in intersectionLengths. The point scalars are interpolated and stored in nearIntersections and farIntersections. intersectedCells, intersectionLengths, or scalars may be NULL to suppress passing the associated information. The number of intersections actually encountered is returned. 0 is returned if and only if no more intersections are to be found.

V.SetBounds(float, float)  C++: void SetBounds(double, double)
V.SetBounds((float, float))  C++: void SetBounds(double a[2])
V.GetBounds() -> (float, float)  C++: double *GetBounds()

SetMaxNumberOfIntersections  V.SetMaxNumberOfIntersections(int)  C++: virtual void SetMaxNumberOfIntersections(vtkIdType _arg)

GetMaxNumberOfIntersections  V.GetMaxNumberOfIntersections() -> int  C++: virtual vtkIdType GetMaxNumberOfIntersections()

vtkRenderingVolumePython.vtkUnstructuredGridVolumeRayCastMapper
vtkUnstructuredGridVolumeRayCastMapper - A software mapper for unstructured volumes
Superclass: vtkUnstructuredGridVolumeMapper
This is a software ray caster for rendering volumes in vtkUnstructuredGrid.
@sa vtkVolumeMapper

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeRayCastMapper  C++: static vtkUnstructuredGridVolumeRayCastMapper *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeRayCastMapper  C++: vtkUnstructuredGridVolumeRayCastMapper *NewInstance()

V.SetImageSampleDistance(float)  C++: virtual void SetImageSampleDistance(float _arg)
Sampling distance in the XY image dimensions. The default value of 1 means one ray cast per pixel. If set to 0.5, 4 rays will be cast per pixel. If set to 2.0, 1 ray will be cast for every 4 (2 by 2) pixels.

V.GetImageSampleDistanceMinValue() -> float  C++: virtual float GetImageSampleDistanceMinValue()
See SetImageSampleDistance().

V.GetImageSampleDistanceMaxValue() -> float  C++: virtual float GetImageSampleDistanceMaxValue()
See SetImageSampleDistance().

V.GetImageSampleDistance() -> float  C++: virtual float GetImageSampleDistance()
See SetImageSampleDistance().
V.SetMinimumImageSampleDistance(float)  C++: virtual void SetMinimumImageSampleDistance(float _arg)
This is the minimum image sample distance allowed when the image sample distance is being automatically adjusted.

V.GetMinimumImageSampleDistanceMinValue() -> float  C++: virtual float GetMinimumImageSampleDistanceMinValue()
See SetMinimumImageSampleDistance().

V.GetMinimumImageSampleDistanceMaxValue() -> float  C++: virtual float GetMinimumImageSampleDistanceMaxValue()
See SetMinimumImageSampleDistance().

V.GetMinimumImageSampleDistance() -> float  C++: virtual float GetMinimumImageSampleDistance()
See SetMinimumImageSampleDistance().

V.SetMaximumImageSampleDistance(float)  C++: virtual void SetMaximumImageSampleDistance(float _arg)
This is the maximum image sample distance allowed when the image sample distance is being automatically adjusted.

V.GetMaximumImageSampleDistanceMinValue() -> float  C++: virtual float GetMaximumImageSampleDistanceMinValue()
See SetMaximumImageSampleDistance().

V.GetMaximumImageSampleDistanceMaxValue() -> float  C++: virtual float GetMaximumImageSampleDistanceMaxValue()
See SetMaximumImageSampleDistance().

V.GetMaximumImageSampleDistance() -> float  C++: virtual float GetMaximumImageSampleDistance()
See SetMaximumImageSampleDistance().

V.SetNumberOfThreads(int)  C++: virtual void SetNumberOfThreads(int _arg)
Set/Get the number of threads to use. By default this is equal to the number of available processors detected.

V.GetNumberOfThreads() -> int  C++: virtual int GetNumberOfThreads()
See SetNumberOfThreads().

SetRayCastFunction  V.SetRayCastFunction(vtkUnstructuredGridVolumeRayCastFunction)  C++: virtual void SetRayCastFunction(vtkUnstructuredGridVolumeRayCastFunction *f)
Set/Get the helper class for casting rays.

GetRayCastFunction  V.GetRayCastFunction() -> vtkUnstructuredGridVolumeRayCastFunction  C++: virtual vtkUnstructuredGridVolumeRayCastFunction *GetRayCastFunction()
Set/Get the helper class for casting rays.

SetRayIntegrator  V.SetRayIntegrator(vtkUnstructuredGridVolumeRayIntegrator)  C++: virtual void SetRayIntegrator(vtkUnstructuredGridVolumeRayIntegrator *ri)
Set/Get the helper class for integrating rays. If set to NULL, a default integrator will be assigned.

GetRayIntegrator  V.GetRayIntegrator() -> vtkUnstructuredGridVolumeRayIntegrator  C++: virtual vtkUnstructuredGridVolumeRayIntegrator *GetRayIntegrator()
Set/Get the helper class for integrating rays. If set to NULL, a default integrator will be assigned.
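A short sketch of tuning the software unstructured-grid ray caster using the sample-distance and threading entries above; the numeric values are illustrative only.

import vtk

mapper = vtk.vtkUnstructuredGridVolumeRayCastMapper()
mapper.SetImageSampleDistance(1.0)         # one ray per pixel when not adapting
mapper.SetMinimumImageSampleDistance(1.0)  # never cast more than one ray per pixel
mapper.SetMaximumImageSampleDistance(8.0)  # allow coarse sampling during interaction
mapper.SetNumberOfThreads(4)               # defaults to the detected processor count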
V.GetImageInUseSize() -> (int, int)  C++: int *GetImageInUseSize()
V.GetImageOrigin() -> (int, int)  C++: int *GetImageOrigin()
V.GetImageViewportSize() -> (int, int)  C++: int *GetImageViewportSize()

CastRays  V.CastRays(int, int)  C++: void CastRays(int threadID, int threadCount)

vtkRenderingVolumePython.vtkUnstructuredGridVolumeRayIntegrator
vtkUnstructuredGridVolumeRayIntegrator - a superclass for volume ray integration functions
Superclass: vtkObject
vtkUnstructuredGridVolumeRayIntegrator is a superclass for ray integration functions that can be used within a vtkUnstructuredGridVolumeRayCastMapper.
@sa vtkUnstructuredGridVolumeRayCastMapper vtkUnstructuredGridVolumeRayCastFunction

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeRayIntegrator  C++: static vtkUnstructuredGridVolumeRayIntegrator *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeRayIntegrator  C++: vtkUnstructuredGridVolumeRayIntegrator *NewInstance()

V.Initialize(vtkVolume, vtkDataArray)  C++: virtual void Initialize(vtkVolume *volume, vtkDataArray *scalars)
Set up the integrator with the given properties and scalars.

V.Integrate(vtkDoubleArray, vtkDataArray, vtkDataArray, [float, float, float, float])  C++: virtual void Integrate(vtkDoubleArray *intersectionLengths, vtkDataArray *nearIntersections, vtkDataArray *farIntersections, float color[4])
Given a set of intersections (defined by the three arrays), compute the piecewise integration of the array in front-to-back order. \c intersectionLengths holds the length of each piecewise segment. \c nearIntersections and \c farIntersections hold the scalar values at the front and back of each segment. \c color should contain the RGBA value of the volume in front of the segments passed in, and the result will be placed back into \c color.

vtkRenderingVolumePython.vtkUnstructuredGridVolumeZSweepMapper
vtkUnstructuredGridVolumeZSweepMapper - Unstructured grid volume mapper based on the ZSweep algorithm
Superclass: vtkUnstructuredGridVolumeMapper
This is a volume mapper for unstructured grids implemented with the ZSweep algorithm. This is a software projective method.
@sa vtkVolumeMapper
@par Background: The algorithm is described in the following paper: Ricardo Farias, Joseph S. B. Mitchell and Claudio T. Silva. ZSWEEP: An Efficient and Exact Projection Algorithm for Unstructured Volume Rendering. In 2000 Volume Visualization Symposium, pages 91-99, October 2000. http://www.cse.ogi.edu/~csilva/papers/volvis2000.pdf

V.SafeDownCast(vtkObjectBase) -> vtkUnstructuredGridVolumeZSweepMapper  C++: static vtkUnstructuredGridVolumeZSweepMapper *SafeDownCast(vtkObjectBase *o)

V.NewInstance() -> vtkUnstructuredGridVolumeZSweepMapper  C++: vtkUnstructuredGridVolumeZSweepMapper *NewInstance()

GetMaxPixelListSize  V.GetMaxPixelListSize() -> int  C++: int GetMaxPixelListSize()
Maximum size allowed for a pixel list. The default is 32. During rendering, if a pixel list is full, incremental compositing is performed. Even though it is a user setting, it is an advanced parameter: you have to understand how the algorithm works to change this value.

SetMaxPixelListSize  V.SetMaxPixelListSize(int)  C++: void SetMaxPixelListSize(int size)
Change the maximum size allowed for a pixel list. It is an advanced parameter. \pre positive_size: size>1
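A minimal sketch of the ZSweep mapper's advanced pixel-list knob; the value chosen is only an example above the documented minimum of size > 1.

import vtk

mapper = vtk.vtkUnstructuredGridVolumeZSweepMapper()
print(mapper.GetMaxPixelListSize())   # default is 32
mapper.SetMaxPixelListSize(64)        # larger lists defer incremental compositing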
\pre positive_size: size>1 V.Render(vtkRenderer, vtkVolume) C++: void Render(vtkRenderer *ren, vtkVolume *vol) override; WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE DO NOT USE THIS METHOD OUTSIDE OF THE RENDERING PROCESS Render the volume BlendModesMINIMUM_INTENSITY_BLENDAVERAGE_INTENSITY_BLENDADDITIVE_BLENDVTK_CROP_SUBVOLUMEVTK_CROP_FENCEVTK_CROP_INVERTED_FENCEVTK_CROP_CROSSVTK_CROP_INVERTED_CROSSvtkRenderingVolumePython.vtkVolumeMapper.BlendModesvtkRenderingVolumePython.vtkVolumeMappervtkVolumeMapper - Abstract class for a volume mapper Superclass: vtkAbstractVolumeMapper vtkVolumeMapper is the abstract definition of a volume mapper for regular rectilinear data (vtkImageData). Several basic types of volume mappers are supported. V.SafeDownCast(vtkObjectBase) -> vtkVolumeMapper C++: static vtkVolumeMapper *SafeDownCast(vtkObjectBase *o) V.NewInstance() -> vtkVolumeMapper C++: vtkVolumeMapper *NewInstance() V.SetInputData(vtkImageData) C++: virtual void SetInputData(vtkImageData *) V.SetInputData(vtkDataSet) C++: virtual void SetInputData(vtkDataSet *) Set/Get the input data V.GetInput() -> vtkImageData C++: vtkImageData *GetInput() Set/Get the input data V.SetBlendMode(int) C++: virtual void SetBlendMode(int _arg) Set/Get the blend mode. The default mode is Composite where the scalar values are sampled through the volume and composited in a front-to-back scheme through alpha blending. The final color and opacity is determined using the color and opacity transfer functions. Maximum and minimum intensity blend modes use the maximum and minimum scalar values, respectively, along the sampling ray. The final color and opacity is determined by passing the resultant value through the color and opacity transfer functions. Additive blend mode accumulates scalar values by passing each value through the opacity transfer function and then adding up the product of the value and its opacity. In other words, the scalar values are scaled using the opacity transfer function and summed to derive the final color. Note that the resulting image is always grayscale i.e. aggregated values are not passed through the color transfer function. This is because the final value is a derived value and not a real data value along the sampling ray. Average intensity blend mode works similar to the additive blend mode where the scalar values are multiplied by opacity calculated from the opacity transfer function and then added. The additional step here is to divide the sum by the number of samples taken through the volume. One can control the scalar range by setting the AverageIPScalarRange ivar to disregard scalar values, not in the range of interest, from the average computation. As is the case with the additive intensity projection, the final image will always be grayscale i.e. the aggregated values are not passed through the color transfer function. This is because the resultant value is a derived value and not a real data value along the sampling ray. ote vtkVolumeMapper::AVERAGE_INTENSITY_BLEND is only supported by the vtkGPUVolumeRayCastMapper with the OpenGL2 backend. \sa SetAverageIPScalarRange() V.SetBlendModeToComposite() C++: void SetBlendModeToComposite() Set/Get the blend mode. The default mode is Composite where the scalar values are sampled through the volume and composited in a front-to-back scheme through alpha blending. The final color and opacity is determined using the color and opacity transfer functions. 
Maximum and minimum intensity blend modes use the maximum and minimum scalar values, respectively, along the sampling ray. The final color and opacity are determined by passing the resultant value through the color and opacity transfer functions.

Additive blend mode accumulates scalar values by passing each value through the opacity transfer function and then adding up the products of the values and their opacities. In other words, the scalar values are scaled using the opacity transfer function and summed to derive the final color. Note that the resulting image is always grayscale, i.e. the aggregated values are not passed through the color transfer function; this is because the final value is a derived value and not a real data value along the sampling ray.

Average intensity blend mode works similarly to the additive blend mode: the scalar values are multiplied by the opacity computed from the opacity transfer function and then added. The additional step is to divide the sum by the number of samples taken through the volume. One can control the scalar range by setting the AverageIPScalarRange ivar so that scalar values outside the range of interest are disregarded from the average computation. As with the additive projection, the final image will always be grayscale, i.e. the aggregated values are not passed through the color transfer function, because the resultant value is a derived value and not a real data value along the sampling ray.

\note vtkVolumeMapper::AVERAGE_INTENSITY_BLEND is only supported by the vtkGPUVolumeRayCastMapper with the OpenGL2 backend.
\sa SetAverageIPScalarRange()

SetBlendModeToMaximumIntensity
V.SetBlendModeToMaximumIntensity() C++: void SetBlendModeToMaximumIntensity()

SetBlendModeToMinimumIntensity
V.SetBlendModeToMinimumIntensity() C++: void SetBlendModeToMinimumIntensity()

SetBlendModeToAverageIntensity
V.SetBlendModeToAverageIntensity() C++: void SetBlendModeToAverageIntensity()

SetBlendModeToAdditive
V.SetBlendModeToAdditive() C++: void SetBlendModeToAdditive()

GetBlendMode
V.GetBlendMode() -> int C++: virtual int GetBlendMode()

Set/Get the blend mode. The default mode is Composite, where the scalar values are sampled through the volume and composited in a front-to-back scheme through alpha blending; the final color and opacity are determined using the color and opacity transfer functions. The maximum intensity, minimum intensity, additive and average intensity modes behave as described above, and the same description applies to all of the blend-mode accessors listed here.
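A minimal sketch of how these accessors are typically called from Python follows. The mapper choice, variable names and the numeric scalar range are illustrative assumptions, not taken from these docstrings; only the blend-mode and AverageIPScalarRange calls are the documented API, and the GPU mapper is used because the note above restricts AVERAGE_INTENSITY_BLEND to it.

    import vtk

    # Hypothetical setup: any concrete vtkVolumeMapper subclass exposes these calls;
    # average intensity projection additionally requires the GPU ray cast mapper.
    mapper = vtk.vtkGPUVolumeRayCastMapper()

    mapper.SetBlendModeToComposite()            # default: front-to-back alpha blending
    mapper.SetBlendModeToMaximumIntensity()     # MIP: brightest sample along each ray
    mapper.SetBlendModeToMinimumIntensity()     # MinIP: darkest sample along each ray
    mapper.SetBlendModeToAdditive()             # opacity-weighted sum (grayscale result)

    mapper.SetBlendModeToAverageIntensity()     # opacity-weighted average (grayscale result)
    mapper.SetAverageIPScalarRange(-500.0, 3000.0)   # illustrative values, e.g. to exclude air in a CT dataset

    print(mapper.GetBlendMode())                # integer enum value of the current mode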
SetAverageIPScalarRange
V.SetAverageIPScalarRange(float, float) C++: void SetAverageIPScalarRange(double, double)
V.SetAverageIPScalarRange((float, float)) C++: void SetAverageIPScalarRange(double a[2])

GetAverageIPScalarRange
V.GetAverageIPScalarRange() -> (float, float) C++: double *GetAverageIPScalarRange()

Set/Get the scalar range to be considered for the average intensity projection blend mode. Only scalar values within this range will be averaged during ray casting. This can be useful when volume rendering CT datasets, where the areas occupied by air would otherwise skew the final rendering. By default, the range is set to (VTK_DOUBLE_MIN, VTK_DOUBLE_MAX). \sa SetBlendModeToAverageIntensity()

SetCropping
V.SetCropping(int) C++: virtual void SetCropping(int _arg)

GetCroppingMinValue
V.GetCroppingMinValue() -> int C++: virtual int GetCroppingMinValue()

GetCroppingMaxValue
V.GetCroppingMaxValue() -> int C++: virtual int GetCroppingMaxValue()

GetCropping
V.GetCropping() -> int C++: virtual int GetCropping()

CroppingOn
V.CroppingOn() C++: virtual void CroppingOn()

CroppingOff
V.CroppingOff() C++: virtual void CroppingOff()

Turn On/Off orthogonal cropping. (Clipping planes are perpendicular to the coordinate axes.)

SetCroppingRegionPlanes
V.SetCroppingRegionPlanes(float, float, float, float, float, float) C++: void SetCroppingRegionPlanes(double, double, double, double, double, double)
V.SetCroppingRegionPlanes((float, float, float, float, float, float)) C++: void SetCroppingRegionPlanes(double a[6])

GetCroppingRegionPlanes
V.GetCroppingRegionPlanes() -> (float, float, float, float, float, float) C++: double *GetCroppingRegionPlanes()

Set/Get the cropping region planes (xmin, xmax, ymin, ymax, zmin, zmax). These planes are defined in volume coordinates; spacing and origin are taken into account.
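A short cropping sketch, again with a hypothetical mapper and arbitrary plane values (given in volume coordinates, as noted above); only the cropping calls come from the documented API.

    import vtk

    mapper = vtk.vtkGPUVolumeRayCastMapper()    # any vtkVolumeMapper subclass (hypothetical choice)
    mapper.CroppingOn()                         # enable orthogonal (axis-aligned) cropping
    mapper.SetCroppingRegionPlanes(0.0, 100.0, 0.0, 100.0, 0.0, 50.0)
    #                              xmin  xmax   ymin  ymax   zmin zmax
    print(mapper.GetCroppingRegionPlanes())     # 6-tuple of the planes just set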
GetVoxelCroppingRegionPlanes
V.GetVoxelCroppingRegionPlanes() -> (float, float, float, float, float, float) C++: double *GetVoxelCroppingRegionPlanes()

Get the cropping region planes in voxels. Only valid during the rendering process.

SetCroppingRegionFlags
V.SetCroppingRegionFlags(int) C++: virtual void SetCroppingRegionFlags(int _arg)

GetCroppingRegionFlagsMinValue
V.GetCroppingRegionFlagsMinValue() -> int C++: virtual int GetCroppingRegionFlagsMinValue()

GetCroppingRegionFlagsMaxValue
V.GetCroppingRegionFlagsMaxValue() -> int C++: virtual int GetCroppingRegionFlagsMaxValue()

GetCroppingRegionFlags
V.GetCroppingRegionFlags() -> int C++: virtual int GetCroppingRegionFlags()

SetCroppingRegionFlagsToSubVolume
V.SetCroppingRegionFlagsToSubVolume() C++: void SetCroppingRegionFlagsToSubVolume()

SetCroppingRegionFlagsToFence
V.SetCroppingRegionFlagsToFence() C++: void SetCroppingRegionFlagsToFence()

SetCroppingRegionFlagsToInvertedFence
V.SetCroppingRegionFlagsToInvertedFence() C++: void SetCroppingRegionFlagsToInvertedFence()

SetCroppingRegionFlagsToCross
V.SetCroppingRegionFlagsToCross() C++: void SetCroppingRegionFlagsToCross()

SetCroppingRegionFlagsToInvertedCross
V.SetCroppingRegionFlagsToInvertedCross() C++: void SetCroppingRegionFlagsToInvertedCross()

Set the flags for the cropping regions. The clipping planes divide the volume into 27 regions; there is one bit for each region. The regions start from the one containing voxel (0,0,0), moving along the x axis fastest, the y axis next, and the z axis slowest. These are represented from the lowest bit up to bit number 27 in the integer containing the flags. There are several convenience functions to set some common configurations: subvolume (the default), fence (between any of the clip plane pairs), inverted fence, cross (between any two of the clip plane pairs) and inverted cross. The same description applies to all of the flag accessors listed here.
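The convenience setters above select preset patterns of the 27-bit flag word. A sketch using a hypothetical mapper (only the flag calls are the documented API):

    import vtk

    mapper = vtk.vtkGPUVolumeRayCastMapper()       # hypothetical vtkVolumeMapper subclass
    mapper.SetCroppingRegionFlagsToSubVolume()     # default: keep only the central sub-volume
    mapper.SetCroppingRegionFlagsToFence()         # keep slabs between the clip-plane pairs
    mapper.SetCroppingRegionFlagsToInvertedFence()
    mapper.SetCroppingRegionFlagsToCross()         # keep regions between any two clip-plane pairs
    mapper.SetCroppingRegionFlagsToInvertedCross()
    print(mapper.GetCroppingRegionFlags())         # raw integer flag word, one bit per region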
vtkVolumeOutlineSource (vtkRenderingVolumePython.vtkVolumeOutlineSource) - outline of volume cropping region

Superclass: vtkPolyDataAlgorithm

vtkVolumeOutlineSource generates a wireframe outline that corresponds to the cropping region of a vtkVolumeMapper. It requires a vtkVolumeMapper as input. The GenerateFaces option turns on the solid faces of the outline, and the GenerateScalars option generates color scalars. When GenerateScalars is on, it is possible to set an "ActivePlaneId" value in the range [0..5] to highlight one of the six cropping planes.

@par Thanks: Thanks to David Gobbi for contributing this class to VTK.

SafeDownCast
V.SafeDownCast(vtkObjectBase) -> vtkVolumeOutlineSource C++: static vtkVolumeOutlineSource *SafeDownCast(vtkObjectBase *o)

NewInstance
V.NewInstance() -> vtkVolumeOutlineSource C++: vtkVolumeOutlineSource *NewInstance()

SetVolumeMapper
V.SetVolumeMapper(vtkVolumeMapper) C++: virtual void SetVolumeMapper(vtkVolumeMapper *mapper)

GetVolumeMapper
V.GetVolumeMapper() -> vtkVolumeMapper C++: vtkVolumeMapper *GetVolumeMapper()

Set/Get the mapper that has the cropping region that the outline will be generated for. The mapper must have an input, because the bounds of the data must be computed in order to generate the outline.

SetGenerateScalars
V.SetGenerateScalars(int) C++: virtual void SetGenerateScalars(int _arg)

GenerateScalarsOn
V.GenerateScalarsOn() C++: virtual void GenerateScalarsOn()

GenerateScalarsOff
V.GenerateScalarsOff() C++: virtual void GenerateScalarsOff()

GetGenerateScalars
V.GetGenerateScalars() -> int C++: virtual int GetGenerateScalars()

Set whether to generate color scalars for the output. By default, the output has no scalars and the color must be set in the property of the actor.

SetGenerateOutline
V.SetGenerateOutline(int) C++: virtual void SetGenerateOutline(int _arg)

GenerateOutlineOn
V.GenerateOutlineOn() C++: virtual void GenerateOutlineOn()

GenerateOutlineOff
V.GenerateOutlineOff() C++: virtual void GenerateOutlineOff()

GetGenerateOutline
V.GetGenerateOutline() -> int C++: virtual int GetGenerateOutline()

Set whether to generate an outline wherever an input face was cut by a plane. This is on by default.

SetGenerateFaces
V.SetGenerateFaces(int) C++: virtual void SetGenerateFaces(int _arg)

GenerateFacesOn
V.GenerateFacesOn() C++: virtual void GenerateFacesOn()

GenerateFacesOff
V.GenerateFacesOff() C++: virtual void GenerateFacesOff()

GetGenerateFaces
V.GetGenerateFaces() -> int C++: virtual int GetGenerateFaces()

Set whether to generate polygonal faces for the output. By default, only lines are generated. The faces will form a closed, watertight surface.

SetColor
V.SetColor(float, float, float) C++: void SetColor(double, double, double)
V.SetColor((float, float, float)) C++: void SetColor(double a[3])

GetColor
V.GetColor() -> (float, float, float) C++: double *GetColor()

SetActivePlaneId
V.SetActivePlaneId(int) C++: virtual void SetActivePlaneId(int _arg)

GetActivePlaneId
V.GetActivePlaneId() -> int C++: virtual int GetActivePlaneId()

Set the active plane, e.g. to display which plane is currently being modified by an interaction. Set this to -1 if there is no active plane. The default value is -1.

SetActivePlaneColor
V.SetActivePlaneColor(float, float, float) C++: void SetActivePlaneColor(double, double, double)
V.SetActivePlaneColor((float, float, float)) C++: void SetActivePlaneColor(double a[3])

GetActivePlaneColor
V.GetActivePlaneColor() -> (float, float, float) C++: double *GetActivePlaneColor()
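A sketch of wiring a vtkVolumeOutlineSource to a hypothetical cropped volume mapper. The vtkPolyDataMapper/vtkActor wiring is the usual VTK pipeline pattern and is assumed rather than taken from these docstrings; as noted above, the volume mapper must already have an input for the outline to be generated.

    import vtk

    mapper = vtk.vtkGPUVolumeRayCastMapper()    # hypothetical cropped volume mapper with an input

    outline = vtk.vtkVolumeOutlineSource()
    outline.SetVolumeMapper(mapper)             # mapper whose cropping region is outlined
    outline.GenerateFacesOn()                   # also emit solid faces, not just the wireframe
    outline.GenerateScalarsOn()                 # emit color scalars instead of relying on the actor property
    outline.SetColor(1.0, 1.0, 0.0)
    outline.SetActivePlaneId(0)                 # highlight one cropping plane; -1 (the default) means none
    outline.SetActivePlaneColor(1.0, 0.0, 0.0)

    outline_mapper = vtk.vtkPolyDataMapper()
    outline_mapper.SetInputConnection(outline.GetOutputPort())
    outline_actor = vtk.vtkActor()
    outline_actor.SetMapper(outline_mapper)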
vtkVolumePicker (vtkRenderingVolumePython.vtkVolumePicker) - ray-cast picker enhanced for volumes

Superclass: vtkCellPicker

vtkVolumePicker is a subclass of vtkCellPicker. It has one advantage over vtkCellPicker for volumes: it is able to correctly perform picking when CroppingPlanes are present. This isn't possible for vtkCellPicker, since it doesn't link to the VolumeRendering classes and hence cannot access information about the CroppingPlanes.

@sa vtkPicker vtkPointPicker vtkCellPicker
@par Thanks: This class was contributed to VTK by David Gobbi on behalf of Atamai Inc.

SafeDownCast
V.SafeDownCast(vtkObjectBase) -> vtkVolumePicker C++: static vtkVolumePicker *SafeDownCast(vtkObjectBase *o)

NewInstance
V.NewInstance() -> vtkVolumePicker C++: vtkVolumePicker *NewInstance()

SetPickCroppingPlanes
V.SetPickCroppingPlanes(int) C++: virtual void SetPickCroppingPlanes(int _arg)

PickCroppingPlanesOn
V.PickCroppingPlanesOn() C++: virtual void PickCroppingPlanesOn()

PickCroppingPlanesOff
V.PickCroppingPlanesOff() C++: virtual void PickCroppingPlanesOff()

GetPickCroppingPlanes
V.GetPickCroppingPlanes() -> int C++: virtual int GetPickCroppingPlanes()

Set whether to pick the cropping planes of props that have them. If this is set, then the pick will be done on the cropping planes rather than on the data. The GetCroppingPlaneId() method will return the index of the cropping plane of the volume that was picked. This setting is only relevant to the picking of volumes.

GetCroppingPlaneId
V.GetCroppingPlaneId() -> int C++: virtual int GetCroppingPlaneId()

Get the index of the cropping plane that the pick ray passed through on its way to the prop. This will be set regardless of whether PickCroppingPlanes is on. The crop planes are ordered as follows: xmin, xmax, ymin, ymax, zmin, zmax. If the volume is not cropped, the value will be set to -1.
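A picking sketch with the cropping planes taken into account. The display coordinates and the renderer are placeholders, and the Pick() call itself comes from the vtkAbstractPicker base class rather than from the docstrings above; the remaining calls are the documented vtkVolumePicker API.

    import vtk

    picker = vtk.vtkVolumePicker()
    picker.PickCroppingPlanesOn()              # pick the cropping planes rather than the data
    picker.Pick(200, 150, 0, renderer)         # display x, y, z and the renderer (placeholder) to pick in
    plane_id = picker.GetCroppingPlaneId()     # 0..5 = xmin..zmax; -1 if the volume is not cropped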
vtkVolumeRayCastSpaceLeapingImageFilter (vtkRenderingVolumePython.vtkVolumeRayCastSpaceLeapingImageFilter) - builds the space-leaping data structure

Superclass: vtkThreadedImageAlgorithm

This is an optimized, multi-threaded imaging filter that builds the space-leaping data structure used by vtkFixedPointVolumeRayCastMapper. Empty-space leaping is used to skip large empty regions in the scalar opacity and/or the gradient opacity transfer functions. Depending on the various options set by vtkFixedPointVolumeRayCastMapper, the class will internally invoke one of many optimized routines to compute the min/max/gradient-max values within a fixed block size, trying to compute everything in a single multi-threaded pass through the data. The block size may be changed at compile time; it is ifdef'ed to 4 in the CXX file.

SafeDownCast
V.SafeDownCast(vtkObjectBase) -> vtkVolumeRayCastSpaceLeapingImageFilter C++: static vtkVolumeRayCastSpaceLeapingImageFilter *SafeDownCast(vtkObjectBase *o)

NewInstance
V.NewInstance() -> vtkVolumeRayCastSpaceLeapingImageFilter C++: vtkVolumeRayCastSpaceLeapingImageFilter *NewInstance()

SetCurrentScalars
V.SetCurrentScalars(vtkDataArray) C++: virtual void SetCurrentScalars(vtkDataArray *)

GetCurrentScalars
V.GetCurrentScalars() -> vtkDataArray C++: virtual vtkDataArray *GetCurrentScalars()

Set/Get the scalars.

SetIndependentComponents
V.SetIndependentComponents(int) C++: virtual void SetIndependentComponents(int _arg)

GetIndependentComponents
V.GetIndependentComponents() -> int C++: virtual int GetIndependentComponents()

Do we use independent components, or dependent components?

SetComputeGradientOpacity
V.SetComputeGradientOpacity(int) C++: virtual void SetComputeGradientOpacity(int _arg)

GetComputeGradientOpacity
V.GetComputeGradientOpacity() -> int C++: virtual int GetComputeGradientOpacity()

ComputeGradientOpacityOn
V.ComputeGradientOpacityOn() C++: virtual void ComputeGradientOpacityOn()

ComputeGradientOpacityOff
V.ComputeGradientOpacityOff() C++: virtual void ComputeGradientOpacityOff()

Compute gradient opacity?

SetComputeMinMax
V.SetComputeMinMax(int) C++: virtual void SetComputeMinMax(int _arg)

GetComputeMinMax
V.GetComputeMinMax() -> int C++: virtual int GetComputeMinMax()

ComputeMinMaxOn
V.ComputeMinMaxOn() C++: virtual void ComputeMinMaxOn()

ComputeMinMaxOff
V.ComputeMinMaxOff() C++: virtual void ComputeMinMaxOff()

Compute the min-max structure?

SetUpdateGradientOpacityFlags
V.SetUpdateGradientOpacityFlags(int) C++: virtual void SetUpdateGradientOpacityFlags(int _arg)

GetUpdateGradientOpacityFlags
V.GetUpdateGradientOpacityFlags() -> int C++: virtual int GetUpdateGradientOpacityFlags()
UpdateGradientOpacityFlagsOn
V.UpdateGradientOpacityFlagsOn() C++: virtual void UpdateGradientOpacityFlagsOn()

UpdateGradientOpacityFlagsOff
V.UpdateGradientOpacityFlagsOff() C++: virtual void UpdateGradientOpacityFlagsOff()

Update the gradient opacity flags. (The scalar opacity flags are always updated upon execution of this filter.)

GetLastMinMaxBuildTime
V.GetLastMinMaxBuildTime() -> int C++: vtkMTimeType GetLastMinMaxBuildTime()

Get the last execution time. This is updated every time the scalars or the gradient opacity values are computed.

GetLastMinMaxFlagTime
V.GetLastMinMaxFlagTime() -> int C++: vtkMTimeType GetLastMinMaxFlagTime()

Get the last execution time. This is updated every time the flag bits are re-computed.

SetTableShift
V.SetTableShift(float, float, float, float) C++: void SetTableShift(float, float, float, float)
V.SetTableShift((float, float, float, float)) C++: void SetTableShift(float a[4])

SetTableScale
V.SetTableScale(float, float, float, float) C++: void SetTableScale(float, float, float, float)
V.SetTableScale((float, float, float, float)) C++: void SetTableScale(float a[4])

SetTableSize
V.SetTableSize(int, int, int, int) C++: void SetTableSize(int, int, int, int)
V.SetTableSize((int, int, int, int)) C++: void SetTableSize(int a[4])

GetTableSize
V.GetTableSize() -> (int, int, int, int) C++: int *GetTableSize()

GetNumberOfIndependentComponents
V.GetNumberOfIndependentComponents() -> int C++: int GetNumberOfIndependentComponents()

Get the number of independent components for which we need to keep track of min/max.

GetMinMaxVolume
V.GetMinMaxVolume([int, int, int, int]) -> (int, ...) C++: unsigned short *GetMinMaxVolume(int dims[4])

Get the raw pointer to the final computed space-leaping data structure. The result is only valid after Update() has been called on the filter. Note that this filter holds onto its memory. The dimensions of the min-max volume are returned in dims; the 4th value in the array indicates the number of independent components (also queried via GetNumberOfIndependentComponents()).

SetCache
V.SetCache(vtkImageData) C++: virtual void SetCache(vtkImageData *imageCache)

INTERNAL - Do not use. Set the last cached min-max volume, as used by vtkFixedPointVolumeRayCastMapper.

ComputeInputExtentsForOutput
V.ComputeInputExtentsForOutput([int, int, int, int, int, int], [int, int, int], [int, int, int, int, int, int], vtkImageData) C++: static void ComputeInputExtentsForOutput(int inExt[6], int inDim[3], int outExt[6], vtkImageData *inData)

INTERNAL - Do not use. Compute the extents and dimensions of the input that are required to generate an output min-max structure given by outExt.

GetMinNonZeroScalarIndex
V.GetMinNonZeroScalarIndex() -> (int, ...) C++: unsigned short *GetMinNonZeroScalarIndex()

GetMinNonZeroGradientMagnitudeIndex
V.GetMinNonZeroGradientMagnitudeIndex() -> (int, ...) C++: unsigned char *GetMinNonZeroGradientMagnitudeIndex()

INTERNAL - Do not use. Get the first non-zero scalar opacity and gradient opacity indices for each independent component.
SetScalarOpacityTable
V.SetScalarOpacityTable(int, [int, ...]) C++: void SetScalarOpacityTable(int c, unsigned short *t)

SetGradientOpacityTable
V.SetGradientOpacityTable(int, [int, ...]) C++: void SetGradientOpacityTable(int c, unsigned short *t)

Set the scalar opacity and gradient opacity tables computed for each component by the vtkFixedPointVolumeRayCastMapper.

ComputeOffset
V.ComputeOffset((int, int, int, int, int, int), (int, int, int, int, int, int), int) -> int C++: vtkIdType ComputeOffset(const int ext[6], const int wholeExt[6], int nComponents)

INTERNAL - Do not use. Compute the offset within an image of whole extents wholeExt, to access the data starting at extents ext.
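Because most of the methods above are marked internal, this filter is normally built and driven by vtkFixedPointVolumeRayCastMapper rather than used directly. A sketch of that typical indirect usage follows; the image data, volume and renderer are placeholders, and the mapper/volume/renderer calls come from the wider VTK API rather than from these docstrings.

    import vtk

    fp_mapper = vtk.vtkFixedPointVolumeRayCastMapper()
    fp_mapper.SetInputData(image_data)      # a vtkImageData volume (placeholder)
    fp_mapper.SetNumberOfThreads(4)         # accessor exported by the fixed-point mapper

    volume = vtk.vtkVolume()
    volume.SetMapper(fp_mapper)
    renderer.AddVolume(volume)              # the min/max space-leaping structure is built internally at render time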
tegration8PsiTableE__ZN40vtkUnstructuredGridPartialPreIntegration9IntegrateEP14vtkDoubleArrayP12vtkDataArrayS3_Pf__ZN41vtkFixedPointVolumeRayCastCompositeHelper13GenerateImageEiiP9vtkVolumeP32vtkFixedPointVolumeRayCastMapper__ZN41vtkFixedPointVolumeRayCastCompositeHelper3NewEv__ZN43vtkFixedPointVolumeRayCastCompositeGOHelper13GenerateImageEiiP9vtkVolumeP32vtkFixedPointVolumeRayCastMapper__ZN43vtkFixedPointVolumeRayCastCompositeGOHelper3NewEv__ZN43vtkUnstructuredGridHomogeneousRayIntegrator10InitializeEP9vtkVolumeP12vtkDataArray__ZN43vtkUnstructuredGridHomogeneousRayIntegrator3NewEv__ZN43vtkUnstructuredGridHomogeneousRayIntegrator9IntegrateEP14vtkDoubleArrayP12vtkDataArrayS3_Pf__ZN46vtkFixedPointVolumeRayCastCompositeShadeHelper13GenerateImageEiiP9vtkVolumeP32vtkFixedPointVolumeRayCastMapper__ZN46vtkFixedPointVolumeRayCastCompositeShadeHelper3NewEv__ZN48vtkFixedPointVolumeRayCastCompositeGOShadeHelper13GenerateImageEiiP9vtkVolumeP32vtkFixedPointVolumeRayCastMapper__ZN48vtkFixedPointVolumeRayCastCompositeGOShadeHelper3NewEv__ZdaPv___cxa_atexit___gxx_personality_v0___stack_chk_fail___stack_chk_guard_exp_strcmpdyld_stub_binder__ZL30PyvtkDirectionEncoder_IsTypeOfP7_objectS0___ZL25PyvtkDirectionEncoder_IsAP7_objectS0___ZL34PyvtkDirectionEncoder_SafeDownCastP7_objectS0___ZL33PyvtkDirectionEncoder_NewInstanceP7_objectS0___ZL41PyvtkDirectionEncoder_GetEncodedDirectionP7_objectS0___ZL40PyvtkDirectionEncoder_GetDecodedGradientP7_objectS0___ZL50PyvtkDirectionEncoder_GetNumberOfEncodedDirectionsP7_objectS0___ZL45PyvtkDirectionEncoder_GetDecodedGradientTableP7_objectS0___ZL38PyvtkEncodedGradientEstimator_IsTypeOfP7_objectS0___ZL33PyvtkEncodedGradientEstimator_IsAP7_objectS0___ZL42PyvtkEncodedGradientEstimator_SafeDownCastP7_objectS0___ZL41PyvtkEncodedGradientEstimator_NewInstanceP7_objectS0___ZL42PyvtkEncodedGradientEstimator_SetInputDataP7_objectS0___ZL42PyvtkEncodedGradientEstimator_GetInputDataP7_objectS0___ZL55PyvtkEncodedGradientEstimator_SetGradientMagnitudeScaleP7_objectS0___ZL55PyvtkEncodedGradientEstimator_GetGradientMagnitudeScaleP7_objectS0___ZL54PyvtkEncodedGradientEstimator_SetGradientMagnitudeBiasP7_objectS0___ZL54PyvtkEncodedGradientEstimator_GetGradientMagnitudeBiasP7_objectS0___ZL43PyvtkEncodedGradientEstimator_SetBoundsClipP7_objectS0___ZL51PyvtkEncodedGradientEstimator_GetBoundsClipMinValueP7_objectS0___ZL51PyvtkEncodedGradientEstimator_GetBoundsClipMaxValueP7_objectS0___ZL43PyvtkEncodedGradientEstimator_GetBoundsClipP7_objectS0___ZL42PyvtkEncodedGradientEstimator_BoundsClipOnP7_objectS0___ZL43PyvtkEncodedGradientEstimator_BoundsClipOffP7_objectS0___ZL39PyvtkEncodedGradientEstimator_SetBoundsP7_objectS0___ZL39PyvtkEncodedGradientEstimator_GetBoundsP7_objectS0___ZL36PyvtkEncodedGradientEstimator_UpdateP7_objectS0___ZL47PyvtkEncodedGradientEstimator_GetEncodedNormalsP7_objectS0___ZL51PyvtkEncodedGradientEstimator_GetEncodedNormalIndexP7_objectS0___ZL51PyvtkEncodedGradientEstimator_GetGradientMagnitudesP7_objectS0___ZL48PyvtkEncodedGradientEstimator_SetNumberOfThreadsP7_objectS0___ZL56PyvtkEncodedGradientEstimator_GetNumberOfThreadsMinValueP7_objectS0___ZL56PyvtkEncodedGradientEstimator_GetNumberOfThreadsMaxValueP7_objectS0___ZL48PyvtkEncodedGradientEstimator_GetNumberOfThreadsP7_objectS0___ZL49PyvtkEncodedGradientEstimator_SetDirectionEncoderP7_objectS0___ZL49PyvtkEncodedGradientEstimator_GetDirectionEncoderP7_objectS0___ZL58PyvtkEncodedGradientEstimator_SetComputeGradientMagnitudesP7_objectS0___ZL58PyvtkEncodedGradientEstimator_GetComputeGradientMagnitudesP7_objectS0___ZL57PyvtkEncodedG
radientEstimator_ComputeGradientMagnitudesOnP7_objectS0___ZL58PyvtkEncodedGradientEstimator_ComputeGradientMagnitudesOffP7_objectS0___ZL45PyvtkEncodedGradientEstimator_SetCylinderClipP7_objectS0___ZL45PyvtkEncodedGradientEstimator_GetCylinderClipP7_objectS0___ZL44PyvtkEncodedGradientEstimator_CylinderClipOnP7_objectS0___ZL45PyvtkEncodedGradientEstimator_CylinderClipOffP7_objectS0___ZL56PyvtkEncodedGradientEstimator_GetLastUpdateTimeInSecondsP7_objectS0___ZL59PyvtkEncodedGradientEstimator_GetLastUpdateTimeInCPUSecondsP7_objectS0___ZL48PyvtkEncodedGradientEstimator_GetUseCylinderClipP7_objectS0___ZL45PyvtkEncodedGradientEstimator_GetCircleLimitsP7_objectS0___ZL52PyvtkEncodedGradientEstimator_SetZeroNormalThresholdP7_objectS0___ZL52PyvtkEncodedGradientEstimator_GetZeroNormalThresholdP7_objectS0___ZL40PyvtkEncodedGradientEstimator_SetZeroPadP7_objectS0___ZL48PyvtkEncodedGradientEstimator_GetZeroPadMinValueP7_objectS0___ZL48PyvtkEncodedGradientEstimator_GetZeroPadMaxValueP7_objectS0___ZL40PyvtkEncodedGradientEstimator_GetZeroPadP7_objectS0___ZL39PyvtkEncodedGradientEstimator_ZeroPadOnP7_objectS0___ZL40PyvtkEncodedGradientEstimator_ZeroPadOffP7_objectS0___ZL42PyvtkEncodedGradientEstimator_GetInputSizeP7_objectS0___ZL44PyvtkEncodedGradientEstimator_GetInputAspectP7_objectS0___ZL36PyvtkEncodedGradientShader_StaticNewv__ZL35PyvtkEncodedGradientShader_IsTypeOfP7_objectS0___ZL30PyvtkEncodedGradientShader_IsAP7_objectS0___ZL39PyvtkEncodedGradientShader_SafeDownCastP7_objectS0___ZL38PyvtkEncodedGradientShader_NewInstanceP7_objectS0___ZL56PyvtkEncodedGradientShader_SetZeroNormalDiffuseIntensityP7_objectS0___ZL64PyvtkEncodedGradientShader_GetZeroNormalDiffuseIntensityMinValueP7_objectS0___ZL64PyvtkEncodedGradientShader_GetZeroNormalDiffuseIntensityMaxValueP7_objectS0___ZL56PyvtkEncodedGradientShader_GetZeroNormalDiffuseIntensityP7_objectS0___ZL57PyvtkEncodedGradientShader_SetZeroNormalSpecularIntensityP7_objectS0___ZL65PyvtkEncodedGradientShader_GetZeroNormalSpecularIntensityMinValueP7_objectS0___ZL65PyvtkEncodedGradientShader_GetZeroNormalSpecularIntensityMaxValueP7_objectS0___ZL57PyvtkEncodedGradientShader_GetZeroNormalSpecularIntensityP7_objectS0___ZL45PyvtkEncodedGradientShader_UpdateShadingTableP7_objectS0___ZL52PyvtkEncodedGradientShader_GetRedDiffuseShadingTableP7_objectS0___ZL54PyvtkEncodedGradientShader_GetGreenDiffuseShadingTableP7_objectS0___ZL53PyvtkEncodedGradientShader_GetBlueDiffuseShadingTableP7_objectS0___ZL53PyvtkEncodedGradientShader_GetRedSpecularShadingTableP7_objectS0___ZL55PyvtkEncodedGradientShader_GetGreenSpecularShadingTableP7_objectS0___ZL54PyvtkEncodedGradientShader_GetBlueSpecularShadingTableP7_objectS0___ZL45PyvtkEncodedGradientShader_SetActiveComponentP7_objectS0___ZL53PyvtkEncodedGradientShader_GetActiveComponentMinValueP7_objectS0___ZL53PyvtkEncodedGradientShader_GetActiveComponentMaxValueP7_objectS0___ZL45PyvtkEncodedGradientShader_GetActiveComponentP7_objectS0___ZL48PyvtkFiniteDifferenceGradientEstimator_StaticNewv__ZL47PyvtkFiniteDifferenceGradientEstimator_IsTypeOfP7_objectS0___ZL42PyvtkFiniteDifferenceGradientEstimator_IsAP7_objectS0___ZL51PyvtkFiniteDifferenceGradientEstimator_SafeDownCastP7_objectS0___ZL50PyvtkFiniteDifferenceGradientEstimator_NewInstanceP7_objectS0___ZL63PyvtkFiniteDifferenceGradientEstimator_SetSampleSpacingInVoxelsP7_objectS0___ZL63PyvtkFiniteDifferenceGradientEstimator_GetSampleSpacingInVoxelsP7_objectS0___ZL37PyvtkFixedPointRayCastImage_StaticNewv__ZL36PyvtkFixedPointRayCastImage_IsTypeOfP7_objectS0___ZL31PyvtkFixedPointRayCastImage_IsAP7_objec
tS0___ZL40PyvtkFixedPointRayCastImage_SafeDownCastP7_objectS0___ZL39PyvtkFixedPointRayCastImage_NewInstanceP7_objectS0___ZL36PyvtkFixedPointRayCastImage_GetImageP7_objectS0___ZL48PyvtkFixedPointRayCastImage_SetImageViewportSizeP7_objectS0___ZL48PyvtkFixedPointRayCastImage_GetImageViewportSizeP7_objectS0___ZL46PyvtkFixedPointRayCastImage_SetImageMemorySizeP7_objectS0___ZL46PyvtkFixedPointRayCastImage_GetImageMemorySizeP7_objectS0___ZL45PyvtkFixedPointRayCastImage_SetImageInUseSizeP7_objectS0___ZL45PyvtkFixedPointRayCastImage_GetImageInUseSizeP7_objectS0___ZL42PyvtkFixedPointRayCastImage_SetImageOriginP7_objectS0___ZL42PyvtkFixedPointRayCastImage_GetImageOriginP7_objectS0___ZL50PyvtkFixedPointRayCastImage_SetImageSampleDistanceP7_objectS0___ZL50PyvtkFixedPointRayCastImage_GetImageSampleDistanceP7_objectS0___ZL41PyvtkFixedPointRayCastImage_AllocateImageP7_objectS0___ZL38PyvtkFixedPointRayCastImage_ClearImageP7_objectS0___ZL42PyvtkFixedPointRayCastImage_SetZBufferSizeP7_objectS0___ZL42PyvtkFixedPointRayCastImage_GetZBufferSizeP7_objectS0___ZL44PyvtkFixedPointRayCastImage_SetZBufferOriginP7_objectS0___ZL44PyvtkFixedPointRayCastImage_GetZBufferOriginP7_objectS0___ZL41PyvtkFixedPointRayCastImage_SetUseZBufferP7_objectS0___ZL49PyvtkFixedPointRayCastImage_GetUseZBufferMinValueP7_objectS0___ZL49PyvtkFixedPointRayCastImage_GetUseZBufferMaxValueP7_objectS0___ZL41PyvtkFixedPointRayCastImage_GetUseZBufferP7_objectS0___ZL40PyvtkFixedPointRayCastImage_UseZBufferOnP7_objectS0___ZL41PyvtkFixedPointRayCastImage_UseZBufferOffP7_objectS0___ZL43PyvtkFixedPointRayCastImage_GetZBufferValueP7_objectS0___ZL38PyvtkFixedPointRayCastImage_GetZBufferP7_objectS0___ZL43PyvtkFixedPointRayCastImage_AllocateZBufferP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastCompositeGOHelper_StaticNewv__ZL54PyvtkFixedPointVolumeRayCastCompositeGOHelper_IsTypeOfP7_objectS0___ZL49PyvtkFixedPointVolumeRayCastCompositeGOHelper_IsAP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastCompositeGOHelper_SafeDownCastP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastCompositeGOHelper_NewInstanceP7_objectS0___ZL59PyvtkFixedPointVolumeRayCastCompositeGOHelper_GenerateImageP7_objectS0___ZL60PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_StaticNewv__ZL59PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_IsTypeOfP7_objectS0___ZL54PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_IsAP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_SafeDownCastP7_objectS0___ZL62PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_NewInstanceP7_objectS0___ZL64PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_GenerateImageP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastCompositeHelper_StaticNewv__ZL52PyvtkFixedPointVolumeRayCastCompositeHelper_IsTypeOfP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastCompositeHelper_IsAP7_objectS0___ZL56PyvtkFixedPointVolumeRayCastCompositeHelper_SafeDownCastP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastCompositeHelper_NewInstanceP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastCompositeHelper_GenerateImageP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastCompositeShadeHelper_StaticNewv__ZL57PyvtkFixedPointVolumeRayCastCompositeShadeHelper_IsTypeOfP7_objectS0___ZL52PyvtkFixedPointVolumeRayCastCompositeShadeHelper_IsAP7_objectS0___ZL61PyvtkFixedPointVolumeRayCastCompositeShadeHelper_SafeDownCastP7_objectS0___ZL60PyvtkFixedPointVolumeRayCastCompositeShadeHelper_NewInstanceP7_objectS0___ZL62PyvtkFixedPointVolumeRayCastCompositeShadeHelper_GenerateImageP7_objectS0___ZL44PyvtkFixedPointVolumeRayCastHelper_StaticNewv__ZL43PyvtkFixedPointVolumeRayC
astHelper_IsTypeOfP7_objectS0___ZL38PyvtkFixedPointVolumeRayCastHelper_IsAP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastHelper_SafeDownCastP7_objectS0___ZL46PyvtkFixedPointVolumeRayCastHelper_NewInstanceP7_objectS0___ZL48PyvtkFixedPointVolumeRayCastHelper_GenerateImageP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastMIPHelper_StaticNewv__ZL46PyvtkFixedPointVolumeRayCastMIPHelper_IsTypeOfP7_objectS0___ZL41PyvtkFixedPointVolumeRayCastMIPHelper_IsAP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMIPHelper_SafeDownCastP7_objectS0___ZL49PyvtkFixedPointVolumeRayCastMIPHelper_NewInstanceP7_objectS0___ZL51PyvtkFixedPointVolumeRayCastMIPHelper_GenerateImageP7_objectS0___ZL44PyvtkFixedPointVolumeRayCastMapper_StaticNewv__ZL43PyvtkFixedPointVolumeRayCastMapper_IsTypeOfP7_objectS0___ZL38PyvtkFixedPointVolumeRayCastMapper_IsAP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastMapper_SafeDownCastP7_objectS0___ZL46PyvtkFixedPointVolumeRayCastMapper_NewInstanceP7_objectS0___ZL52PyvtkFixedPointVolumeRayCastMapper_SetSampleDistanceP7_objectS0___ZL52PyvtkFixedPointVolumeRayCastMapper_GetSampleDistanceP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastMapper_SetInteractiveSampleDistanceP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastMapper_GetInteractiveSampleDistanceP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_SetImageSampleDistanceP7_objectS0___ZL65PyvtkFixedPointVolumeRayCastMapper_GetImageSampleDistanceMinValueP7_objectS0___ZL65PyvtkFixedPointVolumeRayCastMapper_GetImageSampleDistanceMaxValueP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_GetImageSampleDistanceP7_objectS0___ZL64PyvtkFixedPointVolumeRayCastMapper_SetMinimumImageSampleDistanceP7_objectS0___ZL72PyvtkFixedPointVolumeRayCastMapper_GetMinimumImageSampleDistanceMinValueP7_objectS0___ZL72PyvtkFixedPointVolumeRayCastMapper_GetMinimumImageSampleDistanceMaxValueP7_objectS0___ZL64PyvtkFixedPointVolumeRayCastMapper_GetMinimumImageSampleDistanceP7_objectS0___ZL64PyvtkFixedPointVolumeRayCastMapper_SetMaximumImageSampleDistanceP7_objectS0___ZL72PyvtkFixedPointVolumeRayCastMapper_GetMaximumImageSampleDistanceMinValueP7_objectS0___ZL72PyvtkFixedPointVolumeRayCastMapper_GetMaximumImageSampleDistanceMaxValueP7_objectS0___ZL64PyvtkFixedPointVolumeRayCastMapper_GetMaximumImageSampleDistanceP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastMapper_SetAutoAdjustSampleDistancesP7_objectS0___ZL71PyvtkFixedPointVolumeRayCastMapper_GetAutoAdjustSampleDistancesMinValueP7_objectS0___ZL71PyvtkFixedPointVolumeRayCastMapper_GetAutoAdjustSampleDistancesMaxValueP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastMapper_GetAutoAdjustSampleDistancesP7_objectS0___ZL62PyvtkFixedPointVolumeRayCastMapper_AutoAdjustSampleDistancesOnP7_objectS0___ZL63PyvtkFixedPointVolumeRayCastMapper_AutoAdjustSampleDistancesOffP7_objectS0___ZL70PyvtkFixedPointVolumeRayCastMapper_SetLockSampleDistanceToInputSpacingP7_objectS0___ZL78PyvtkFixedPointVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingMinValueP7_objectS0___ZL78PyvtkFixedPointVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingMaxValueP7_objectS0___ZL70PyvtkFixedPointVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingP7_objectS0___ZL69PyvtkFixedPointVolumeRayCastMapper_LockSampleDistanceToInputSpacingOnP7_objectS0___ZL70PyvtkFixedPointVolumeRayCastMapper_LockSampleDistanceToInputSpacingOffP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_SetNumberOfThreadsP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_GetNumberOfThreadsP7_objectS0___ZL66PyvtkFixedPointVolumeRayCastMapper_SetIntermixIntersectingGeometryP7_objectS0___ZL74PyvtkFixedPoin
tVolumeRayCastMapper_GetIntermixIntersectingGeometryMinValueP7_objectS0___ZL74PyvtkFixedPointVolumeRayCastMapper_GetIntermixIntersectingGeometryMaxValueP7_objectS0___ZL66PyvtkFixedPointVolumeRayCastMapper_GetIntermixIntersectingGeometryP7_objectS0___ZL65PyvtkFixedPointVolumeRayCastMapper_IntermixIntersectingGeometryOnP7_objectS0___ZL66PyvtkFixedPointVolumeRayCastMapper_IntermixIntersectingGeometryOffP7_objectS0___ZL69PyvtkFixedPointVolumeRayCastMapper_ComputeRequiredImageSampleDistanceP7_objectS0___ZL41PyvtkFixedPointVolumeRayCastMapper_RenderP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastMapper_ToFixedPointPositionP7_objectS0___ZL56PyvtkFixedPointVolumeRayCastMapper_ToFixedPointDirectionP7_objectS0___ZL54PyvtkFixedPointVolumeRayCastMapper_FixedPointIncrementP7_objectS0___ZL60PyvtkFixedPointVolumeRayCastMapper_GetFloatTripleFromPointerP7_objectS0___ZL59PyvtkFixedPointVolumeRayCastMapper_GetUIntTripleFromPointerP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMapper_ShiftVectorDownP7_objectS0___ZL56PyvtkFixedPointVolumeRayCastMapper_CheckMinMaxVolumeFlagP7_objectS0___ZL59PyvtkFixedPointVolumeRayCastMapper_CheckMIPMinMaxVolumeFlagP7_objectS0___ZL48PyvtkFixedPointVolumeRayCastMapper_LookupColorUCP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_LookupDependentColorUCP7_objectS0___ZL49PyvtkFixedPointVolumeRayCastMapper_CheckIfCroppedP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMapper_GetRenderWindowP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastMapper_GetMIPHelperP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_GetCompositeHelperP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastMapper_GetCompositeGOHelperP7_objectS0___ZL60PyvtkFixedPointVolumeRayCastMapper_GetCompositeGOShadeHelperP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastMapper_GetCompositeShadeHelperP7_objectS0___ZL48PyvtkFixedPointVolumeRayCastMapper_GetTableShiftP7_objectS0___ZL48PyvtkFixedPointVolumeRayCastMapper_GetTableScaleP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_GetShadingRequiredP7_objectS0___ZL61PyvtkFixedPointVolumeRayCastMapper_GetGradientOpacityRequiredP7_objectS0___ZL52PyvtkFixedPointVolumeRayCastMapper_GetCurrentScalarsP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_GetPreviousScalarsP7_objectS0___ZL47PyvtkFixedPointVolumeRayCastMapper_GetRowBoundsP7_objectS0___ZL48PyvtkFixedPointVolumeRayCastMapper_GetColorTableP7_objectS0___ZL56PyvtkFixedPointVolumeRayCastMapper_GetScalarOpacityTableP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastMapper_GetGradientOpacityTableP7_objectS0___ZL44PyvtkFixedPointVolumeRayCastMapper_GetVolumeP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_GetDiffuseShadingTableP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastMapper_GetSpecularShadingTableP7_objectS0___ZL49PyvtkFixedPointVolumeRayCastMapper_ComputeRayInfoP7_objectS0___ZL52PyvtkFixedPointVolumeRayCastMapper_InitializeRayInfoP7_objectS0___ZL72PyvtkFixedPointVolumeRayCastMapper_ShouldUseNearestNeighborInterpolationP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMapper_SetRayCastImageP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMapper_GetRayCastImageP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_PerImageInitializationP7_objectS0___ZL58PyvtkFixedPointVolumeRayCastMapper_PerVolumeInitializationP7_objectS0___ZL61PyvtkFixedPointVolumeRayCastMapper_PerSubVolumeInitializationP7_objectS0___ZL50PyvtkFixedPointVolumeRayCastMapper_RenderSubVolumeP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastMapper_DisplayRenderedImageP7_objectS0___ZL46PyvtkFixedPointVolumeRayCastMapper_AbortRenderP7_objectS0___ZL54PyvtkFixedPointVolumeRayCastMapper_Create
CanonicalViewP7_objectS0___ZL57PyvtkFixedPointVolumeRayCastMapper_GetEstimatedRenderTimeP7_objectS0___ZL54PyvtkFixedPointVolumeRayCastMapper_SetFinalColorWindowP7_objectS0___ZL54PyvtkFixedPointVolumeRayCastMapper_GetFinalColorWindowP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_SetFinalColorLevelP7_objectS0___ZL53PyvtkFixedPointVolumeRayCastMapper_GetFinalColorLevelP7_objectS0___ZL55PyvtkFixedPointVolumeRayCastMapper_GetFlipMIPComparisonP7_objectS0___ZL59PyvtkFixedPointVolumeRayCastMapper_ReleaseGraphicsResourcesP7_objectS0___ZN23vtkAbstractVolumeMapper8IsTypeOfEPKc__ZL37PyvtkGPUVolumeRayCastMapper_StaticNewv__ZL36PyvtkGPUVolumeRayCastMapper_IsTypeOfP7_objectS0___ZL31PyvtkGPUVolumeRayCastMapper_IsAP7_objectS0___ZL40PyvtkGPUVolumeRayCastMapper_SafeDownCastP7_objectS0___ZL39PyvtkGPUVolumeRayCastMapper_NewInstanceP7_objectS0___ZL56PyvtkGPUVolumeRayCastMapper_SetAutoAdjustSampleDistancesP7_objectS0___ZL64PyvtkGPUVolumeRayCastMapper_GetAutoAdjustSampleDistancesMinValueP7_objectS0___ZL64PyvtkGPUVolumeRayCastMapper_GetAutoAdjustSampleDistancesMaxValueP7_objectS0___ZL56PyvtkGPUVolumeRayCastMapper_GetAutoAdjustSampleDistancesP7_objectS0___ZL55PyvtkGPUVolumeRayCastMapper_AutoAdjustSampleDistancesOnP7_objectS0___ZL56PyvtkGPUVolumeRayCastMapper_AutoAdjustSampleDistancesOffP7_objectS0___ZL63PyvtkGPUVolumeRayCastMapper_SetLockSampleDistanceToInputSpacingP7_objectS0___ZL71PyvtkGPUVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingMinValueP7_objectS0___ZL71PyvtkGPUVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingMaxValueP7_objectS0___ZL63PyvtkGPUVolumeRayCastMapper_GetLockSampleDistanceToInputSpacingP7_objectS0___ZL62PyvtkGPUVolumeRayCastMapper_LockSampleDistanceToInputSpacingOnP7_objectS0___ZL63PyvtkGPUVolumeRayCastMapper_LockSampleDistanceToInputSpacingOffP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_SetUseJitteringP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetUseJitteringMinValueP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetUseJitteringMaxValueP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_GetUseJitteringP7_objectS0___ZL42PyvtkGPUVolumeRayCastMapper_UseJitteringOnP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_UseJitteringOffP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_SetUseDepthPassP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetUseDepthPassMinValueP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetUseDepthPassMaxValueP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_GetUseDepthPassP7_objectS0___ZL42PyvtkGPUVolumeRayCastMapper_UseDepthPassOnP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_UseDepthPassOffP7_objectS0___ZL53PyvtkGPUVolumeRayCastMapper_GetDepthPassContourValuesP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_SetSampleDistanceP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_GetSampleDistanceP7_objectS0___ZL50PyvtkGPUVolumeRayCastMapper_SetImageSampleDistanceP7_objectS0___ZL58PyvtkGPUVolumeRayCastMapper_GetImageSampleDistanceMinValueP7_objectS0___ZL58PyvtkGPUVolumeRayCastMapper_GetImageSampleDistanceMaxValueP7_objectS0___ZL50PyvtkGPUVolumeRayCastMapper_GetImageSampleDistanceP7_objectS0___ZL57PyvtkGPUVolumeRayCastMapper_SetMinimumImageSampleDistanceP7_objectS0___ZL65PyvtkGPUVolumeRayCastMapper_GetMinimumImageSampleDistanceMinValueP7_objectS0___ZL65PyvtkGPUVolumeRayCastMapper_GetMinimumImageSampleDistanceMaxValueP7_objectS0___ZL57PyvtkGPUVolumeRayCastMapper_GetMinimumImageSampleDistanceP7_objectS0___ZL57PyvtkGPUVolumeRayCastMapper_SetMaximumImageSampleDistanceP7_objectS0___ZL65PyvtkGPUVolumeRayCastMapper_GetMaximumImageSampleDistanceMinValueP7_objectS0___ZL65PyvtkGPUVolumeRayCastMapper_
GetMaximumImageSampleDistanceMaxValueP7_objectS0___ZL57PyvtkGPUVolumeRayCastMapper_GetMaximumImageSampleDistanceP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_SetFinalColorWindowP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_GetFinalColorWindowP7_objectS0___ZL46PyvtkGPUVolumeRayCastMapper_SetFinalColorLevelP7_objectS0___ZL46PyvtkGPUVolumeRayCastMapper_GetFinalColorLevelP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_SetMaxMemoryInBytesP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_GetMaxMemoryInBytesP7_objectS0___ZL48PyvtkGPUVolumeRayCastMapper_SetMaxMemoryFractionP7_objectS0___ZL56PyvtkGPUVolumeRayCastMapper_GetMaxMemoryFractionMinValueP7_objectS0___ZL56PyvtkGPUVolumeRayCastMapper_GetMaxMemoryFractionMaxValueP7_objectS0___ZL48PyvtkGPUVolumeRayCastMapper_GetMaxMemoryFractionP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_SetReportProgressP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_GetReportProgressP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_IsRenderSupportedP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_CreateCanonicalViewP7_objectS0___ZL40PyvtkGPUVolumeRayCastMapper_SetMaskInputP7_objectS0___ZL40PyvtkGPUVolumeRayCastMapper_GetMaskInputP7_objectS0___ZL39PyvtkGPUVolumeRayCastMapper_SetMaskTypeP7_objectS0___ZL39PyvtkGPUVolumeRayCastMapper_GetMaskTypeP7_objectS0___ZL47PyvtkGPUVolumeRayCastMapper_SetMaskTypeToBinaryP7_objectS0___ZL49PyvtkGPUVolumeRayCastMapper_SetMaskTypeToLabelMapP7_objectS0___ZL46PyvtkGPUVolumeRayCastMapper_SetMaskBlendFactorP7_objectS0___ZL54PyvtkGPUVolumeRayCastMapper_GetMaskBlendFactorMinValueP7_objectS0___ZL54PyvtkGPUVolumeRayCastMapper_GetMaskBlendFactorMaxValueP7_objectS0___ZL46PyvtkGPUVolumeRayCastMapper_GetMaskBlendFactorP7_objectS0___ZL44PyvtkGPUVolumeRayCastMapper_SetRenderToImageP7_objectS0___ZL44PyvtkGPUVolumeRayCastMapper_GetRenderToImageP7_objectS0___ZL43PyvtkGPUVolumeRayCastMapper_RenderToImageOnP7_objectS0___ZL44PyvtkGPUVolumeRayCastMapper_RenderToImageOffP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_SetDepthImageScalarTypeP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetDepthImageScalarTypeP7_objectS0___ZL65PyvtkGPUVolumeRayCastMapper_SetDepthImageScalarTypeToUnsignedCharP7_objectS0___ZL66PyvtkGPUVolumeRayCastMapper_SetDepthImageScalarTypeToUnsignedShortP7_objectS0___ZL58PyvtkGPUVolumeRayCastMapper_SetDepthImageScalarTypeToFloatP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_SetClampDepthToBackfaceP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_GetClampDepthToBackfaceP7_objectS0___ZL50PyvtkGPUVolumeRayCastMapper_ClampDepthToBackfaceOnP7_objectS0___ZL51PyvtkGPUVolumeRayCastMapper_ClampDepthToBackfaceOffP7_objectS0___ZL41PyvtkGPUVolumeRayCastMapper_GetDepthImageP7_objectS0___ZL41PyvtkGPUVolumeRayCastMapper_GetColorImageP7_objectS0___ZL34PyvtkGPUVolumeRayCastMapper_RenderP7_objectS0___ZL37PyvtkGPUVolumeRayCastMapper_GPURenderP7_objectS0___ZL52PyvtkGPUVolumeRayCastMapper_ReleaseGraphicsResourcesP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_GetReductionRatioP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_SetColorRangeTypeP7_objectS0___ZL45PyvtkGPUVolumeRayCastMapper_GetColorRangeTypeP7_objectS0___ZL53PyvtkGPUVolumeRayCastMapper_SetScalarOpacityRangeTypeP7_objectS0___ZL53PyvtkGPUVolumeRayCastMapper_GetScalarOpacityRangeTypeP7_objectS0___ZL55PyvtkGPUVolumeRayCastMapper_SetGradientOpacityRangeTypeP7_objectS0___ZL55PyvtkGPUVolumeRayCastMapper_GetGradientOpacityRangeTypeP7_objectS0___ZL36PyvtkOSPRayVolumeInterface_StaticNewv__ZL35PyvtkOSPRayVolumeInterface_IsTypeOfP7_objectS0___ZL30PyvtkOSPRayVolumeInterface_IsAP7_objectS0___ZL39PyvtkOSPRayVolumeInterface_SafeDownCastP7_objectS0
___ZL38PyvtkOSPRayVolumeInterface_NewInstanceP7_objectS0___ZL33PyvtkOSPRayVolumeInterface_RenderP7_objectS0___ZL40PyvtkProjectedTetrahedraMapper_StaticNewv__ZL39PyvtkProjectedTetrahedraMapper_IsTypeOfP7_objectS0___ZL34PyvtkProjectedTetrahedraMapper_IsAP7_objectS0___ZL43PyvtkProjectedTetrahedraMapper_SafeDownCastP7_objectS0___ZL42PyvtkProjectedTetrahedraMapper_NewInstanceP7_objectS0___ZL48PyvtkProjectedTetrahedraMapper_SetVisibilitySortP7_objectS0___ZL48PyvtkProjectedTetrahedraMapper_GetVisibilitySortP7_objectS0___ZL49PyvtkProjectedTetrahedraMapper_MapScalarsToColorsP7_objectS0___ZL46PyvtkProjectedTetrahedraMapper_TransformPointsP7_objectS0___ZL42PyvtkProjectedTetrahedraMapper_IsSupportedP7_objectS0___ZL40PyvtkRayCastImageDisplayHelper_StaticNewv__ZL39PyvtkRayCastImageDisplayHelper_IsTypeOfP7_objectS0___ZL34PyvtkRayCastImageDisplayHelper_IsAP7_objectS0___ZL43PyvtkRayCastImageDisplayHelper_SafeDownCastP7_objectS0___ZL42PyvtkRayCastImageDisplayHelper_NewInstanceP7_objectS0___ZL44PyvtkRayCastImageDisplayHelper_RenderTextureP7_objectS0___ZL53PyvtkRayCastImageDisplayHelper_SetPreMultipliedColorsP7_objectS0___ZL61PyvtkRayCastImageDisplayHelper_GetPreMultipliedColorsMinValueP7_objectS0___ZL61PyvtkRayCastImageDisplayHelper_GetPreMultipliedColorsMaxValueP7_objectS0___ZL53PyvtkRayCastImageDisplayHelper_GetPreMultipliedColorsP7_objectS0___ZL52PyvtkRayCastImageDisplayHelper_PreMultipliedColorsOnP7_objectS0___ZL53PyvtkRayCastImageDisplayHelper_PreMultipliedColorsOffP7_objectS0___ZL44PyvtkRayCastImageDisplayHelper_SetPixelScaleP7_objectS0___ZL44PyvtkRayCastImageDisplayHelper_GetPixelScaleP7_objectS0___ZL55PyvtkRayCastImageDisplayHelper_ReleaseGraphicsResourcesP7_objectS0___ZL47PyvtkRayCastImageDisplayHelper_RenderTexture_s1P7_objectS0___ZL47PyvtkRayCastImageDisplayHelper_RenderTexture_s2P7_objectS0___ZL46PyvtkRecursiveSphereDirectionEncoder_StaticNewv__ZL45PyvtkRecursiveSphereDirectionEncoder_IsTypeOfP7_objectS0___ZL40PyvtkRecursiveSphereDirectionEncoder_IsAP7_objectS0___ZL49PyvtkRecursiveSphereDirectionEncoder_SafeDownCastP7_objectS0___ZL48PyvtkRecursiveSphereDirectionEncoder_NewInstanceP7_objectS0___ZL56PyvtkRecursiveSphereDirectionEncoder_GetEncodedDirectionP7_objectS0___ZL55PyvtkRecursiveSphereDirectionEncoder_GetDecodedGradientP7_objectS0___ZL65PyvtkRecursiveSphereDirectionEncoder_GetNumberOfEncodedDirectionsP7_objectS0___ZL60PyvtkRecursiveSphereDirectionEncoder_GetDecodedGradientTableP7_objectS0___ZL54PyvtkRecursiveSphereDirectionEncoder_SetRecursionDepthP7_objectS0___ZL62PyvtkRecursiveSphereDirectionEncoder_GetRecursionDepthMinValueP7_objectS0___ZL62PyvtkRecursiveSphereDirectionEncoder_GetRecursionDepthMaxValueP7_objectS0___ZL54PyvtkRecursiveSphereDirectionEncoder_GetRecursionDepthP7_objectS0___ZL40PyvtkSphericalDirectionEncoder_StaticNewv__ZL39PyvtkSphericalDirectionEncoder_IsTypeOfP7_objectS0___ZL34PyvtkSphericalDirectionEncoder_IsAP7_objectS0___ZL43PyvtkSphericalDirectionEncoder_SafeDownCastP7_objectS0___ZL42PyvtkSphericalDirectionEncoder_NewInstanceP7_objectS0___ZL50PyvtkSphericalDirectionEncoder_GetEncodedDirectionP7_objectS0___ZL49PyvtkSphericalDirectionEncoder_GetDecodedGradientP7_objectS0___ZL59PyvtkSphericalDirectionEncoder_GetNumberOfEncodedDirectionsP7_objectS0___ZL54PyvtkSphericalDirectionEncoder_GetDecodedGradientTableP7_objectS0___ZL51PyvtkUnstructuredGridBunykRayCastFunction_StaticNewv__ZL50PyvtkUnstructuredGridBunykRayCastFunction_IsTypeOfP7_objectS0___ZL45PyvtkUnstructuredGridBunykRayCastFunction_IsAP7_objectS0___ZL54PyvtkUnstructuredGridBunykRayCastFunction_SafeDownCastP7_objectS0
___ZL53PyvtkUnstructuredGridBunykRayCastFunction_NewInstanceP7_objectS0___ZL52PyvtkUnstructuredGridBunykRayCastFunction_InitializeP7_objectS0___ZL50PyvtkUnstructuredGridBunykRayCastFunction_FinalizeP7_objectS0___ZL53PyvtkUnstructuredGridBunykRayCastFunction_NewIteratorP7_objectS0___ZL51PyvtkUnstructuredGridBunykRayCastFunction_GetPointsP7_objectS0___ZL62PyvtkUnstructuredGridBunykRayCastFunction_GetViewToWorldMatrixP7_objectS0___ZL56PyvtkUnstructuredGridBunykRayCastFunction_GetImageOriginP7_objectS0___ZL62PyvtkUnstructuredGridBunykRayCastFunction_GetImageViewportSizeP7_objectS0___ZL55PyvtkUnstructuredGridHomogeneousRayIntegrator_StaticNewv__ZL54PyvtkUnstructuredGridHomogeneousRayIntegrator_IsTypeOfP7_objectS0___ZL49PyvtkUnstructuredGridHomogeneousRayIntegrator_IsAP7_objectS0___ZL58PyvtkUnstructuredGridHomogeneousRayIntegrator_SafeDownCastP7_objectS0___ZL57PyvtkUnstructuredGridHomogeneousRayIntegrator_NewInstanceP7_objectS0___ZL56PyvtkUnstructuredGridHomogeneousRayIntegrator_InitializeP7_objectS0___ZL55PyvtkUnstructuredGridHomogeneousRayIntegrator_IntegrateP7_objectS0___ZL74PyvtkUnstructuredGridHomogeneousRayIntegrator_SetTransferFunctionTableSizeP7_objectS0___ZL74PyvtkUnstructuredGridHomogeneousRayIntegrator_GetTransferFunctionTableSizeP7_objectS0___ZL50PyvtkUnstructuredGridLinearRayIntegrator_StaticNewv__ZL49PyvtkUnstructuredGridLinearRayIntegrator_IsTypeOfP7_objectS0___ZL44PyvtkUnstructuredGridLinearRayIntegrator_IsAP7_objectS0___ZL53PyvtkUnstructuredGridLinearRayIntegrator_SafeDownCastP7_objectS0___ZL52PyvtkUnstructuredGridLinearRayIntegrator_NewInstanceP7_objectS0___ZL51PyvtkUnstructuredGridLinearRayIntegrator_InitializeP7_objectS0___ZL50PyvtkUnstructuredGridLinearRayIntegrator_IntegrateP7_objectS0___ZL53PyvtkUnstructuredGridLinearRayIntegrator_IntegrateRayP7_objectS0___ZL44PyvtkUnstructuredGridLinearRayIntegrator_PsiP7_objectS0___ZL56PyvtkUnstructuredGridLinearRayIntegrator_IntegrateRay_s1P7_objectS0___ZL56PyvtkUnstructuredGridLinearRayIntegrator_IntegrateRay_s2P7_objectS0___ZL52PyvtkUnstructuredGridPartialPreIntegration_StaticNewv__ZL51PyvtkUnstructuredGridPartialPreIntegration_IsTypeOfP7_objectS0___ZL46PyvtkUnstructuredGridPartialPreIntegration_IsAP7_objectS0___ZL55PyvtkUnstructuredGridPartialPreIntegration_SafeDownCastP7_objectS0___ZL54PyvtkUnstructuredGridPartialPreIntegration_NewInstanceP7_objectS0___ZL53PyvtkUnstructuredGridPartialPreIntegration_InitializeP7_objectS0___ZL52PyvtkUnstructuredGridPartialPreIntegration_IntegrateP7_objectS0___ZL55PyvtkUnstructuredGridPartialPreIntegration_IntegrateRayP7_objectS0___ZL46PyvtkUnstructuredGridPartialPreIntegration_PsiP7_objectS0___ZL54PyvtkUnstructuredGridPartialPreIntegration_GetPsiTableP7_objectS0___ZL56PyvtkUnstructuredGridPartialPreIntegration_BuildPsiTableP7_objectS0___ZL58PyvtkUnstructuredGridPartialPreIntegration_IntegrateRay_s1P7_objectS0___ZL58PyvtkUnstructuredGridPartialPreIntegration_IntegrateRay_s2P7_objectS0___ZL45PyvtkUnstructuredGridPreIntegration_StaticNewv__ZL44PyvtkUnstructuredGridPreIntegration_IsTypeOfP7_objectS0___ZL39PyvtkUnstructuredGridPreIntegration_IsAP7_objectS0___ZL48PyvtkUnstructuredGridPreIntegration_SafeDownCastP7_objectS0___ZL47PyvtkUnstructuredGridPreIntegration_NewInstanceP7_objectS0___ZL46PyvtkUnstructuredGridPreIntegration_InitializeP7_objectS0___ZL45PyvtkUnstructuredGridPreIntegration_IntegrateP7_objectS0___ZL49PyvtkUnstructuredGridPreIntegration_GetIntegratorP7_objectS0___ZL49PyvtkUnstructuredGridPreIntegration_SetIntegratorP7_objectS0___ZL71PyvtkUnstructuredGridPreIntegration_SetIntegrationTableScalar
ResolutionP7_objectS0___ZL71PyvtkUnstructuredGridPreIntegration_GetIntegrationTableScalarResolutionP7_objectS0___ZL71PyvtkUnstructuredGridPreIntegration_SetIntegrationTableLengthResolutionP7_objectS0___ZL71PyvtkUnstructuredGridPreIntegration_GetIntegrationTableLengthResolutionP7_objectS0___ZL66PyvtkUnstructuredGridPreIntegration_GetIntegrationTableScalarShiftP7_objectS0___ZL66PyvtkUnstructuredGridPreIntegration_GetIntegrationTableScalarScaleP7_objectS0___ZL66PyvtkUnstructuredGridPreIntegration_GetIntegrationTableLengthScaleP7_objectS0___ZL64PyvtkUnstructuredGridPreIntegration_GetIncrementalPreIntegrationP7_objectS0___ZL64PyvtkUnstructuredGridPreIntegration_SetIncrementalPreIntegrationP7_objectS0___ZL63PyvtkUnstructuredGridPreIntegration_IncrementalPreIntegrationOnP7_objectS0___ZL64PyvtkUnstructuredGridPreIntegration_IncrementalPreIntegrationOffP7_objectS0___ZL58PyvtkUnstructuredGridPreIntegration_GetPreIntegrationTableP7_objectS0___ZL49PyvtkUnstructuredGridPreIntegration_GetTableEntryP7_objectS0___ZL56PyvtkUnstructuredGridPreIntegration_GetIndexedTableEntryP7_objectS0___ZL42PyvtkUnstructuredGridVolumeMapper_IsTypeOfP7_objectS0___ZL37PyvtkUnstructuredGridVolumeMapper_IsAP7_objectS0___ZL46PyvtkUnstructuredGridVolumeMapper_SafeDownCastP7_objectS0___ZL45PyvtkUnstructuredGridVolumeMapper_NewInstanceP7_objectS0___ZL46PyvtkUnstructuredGridVolumeMapper_SetInputDataP7_objectS0___ZL42PyvtkUnstructuredGridVolumeMapper_GetInputP7_objectS0___ZL46PyvtkUnstructuredGridVolumeMapper_SetBlendModeP7_objectS0___ZL57PyvtkUnstructuredGridVolumeMapper_SetBlendModeToCompositeP7_objectS0___ZL64PyvtkUnstructuredGridVolumeMapper_SetBlendModeToMaximumIntensityP7_objectS0___ZL46PyvtkUnstructuredGridVolumeMapper_GetBlendModeP7_objectS0___ZL40PyvtkUnstructuredGridVolumeMapper_RenderP7_objectS0___ZL58PyvtkUnstructuredGridVolumeMapper_ReleaseGraphicsResourcesP7_objectS0___ZL49PyvtkUnstructuredGridVolumeMapper_SetInputData_s1P7_objectS0___ZL49PyvtkUnstructuredGridVolumeMapper_SetInputData_s2P7_objectS0___ZL51PyvtkUnstructuredGridVolumeRayCastFunction_IsTypeOfP7_objectS0___ZL46PyvtkUnstructuredGridVolumeRayCastFunction_IsAP7_objectS0___ZL55PyvtkUnstructuredGridVolumeRayCastFunction_SafeDownCastP7_objectS0___ZL54PyvtkUnstructuredGridVolumeRayCastFunction_NewInstanceP7_objectS0___ZL53PyvtkUnstructuredGridVolumeRayCastFunction_InitializeP7_objectS0___ZL51PyvtkUnstructuredGridVolumeRayCastFunction_FinalizeP7_objectS0___ZL54PyvtkUnstructuredGridVolumeRayCastFunction_NewIteratorP7_objectS0___ZL51PyvtkUnstructuredGridVolumeRayCastIterator_IsTypeOfP7_objectS0___ZL46PyvtkUnstructuredGridVolumeRayCastIterator_IsAP7_objectS0___ZL55PyvtkUnstructuredGridVolumeRayCastIterator_SafeDownCastP7_objectS0___ZL54PyvtkUnstructuredGridVolumeRayCastIterator_NewInstanceP7_objectS0___ZL53PyvtkUnstructuredGridVolumeRayCastIterator_InitializeP7_objectS0___ZL63PyvtkUnstructuredGridVolumeRayCastIterator_GetNextIntersectionsP7_objectS0___ZL52PyvtkUnstructuredGridVolumeRayCastIterator_SetBoundsP7_objectS0___ZL52PyvtkUnstructuredGridVolumeRayCastIterator_GetBoundsP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastIterator_SetMaxNumberOfIntersectionsP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastIterator_GetMaxNumberOfIntersectionsP7_objectS0___ZL50PyvtkUnstructuredGridVolumeRayCastMapper_StaticNewv__ZL49PyvtkUnstructuredGridVolumeRayCastMapper_IsTypeOfP7_objectS0___ZL44PyvtkUnstructuredGridVolumeRayCastMapper_IsAP7_objectS0___ZL53PyvtkUnstructuredGridVolumeRayCastMapper_SafeDownCastP7_objectS0___ZL52PyvtkUnstructuredGridVolumeRayCastMapper_NewInstance
P7_objectS0___ZL63PyvtkUnstructuredGridVolumeRayCastMapper_SetImageSampleDistanceP7_objectS0___ZL71PyvtkUnstructuredGridVolumeRayCastMapper_GetImageSampleDistanceMinValueP7_objectS0___ZL71PyvtkUnstructuredGridVolumeRayCastMapper_GetImageSampleDistanceMaxValueP7_objectS0___ZL63PyvtkUnstructuredGridVolumeRayCastMapper_GetImageSampleDistanceP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastMapper_SetMinimumImageSampleDistanceP7_objectS0___ZL78PyvtkUnstructuredGridVolumeRayCastMapper_GetMinimumImageSampleDistanceMinValueP7_objectS0___ZL78PyvtkUnstructuredGridVolumeRayCastMapper_GetMinimumImageSampleDistanceMaxValueP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastMapper_GetMinimumImageSampleDistanceP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastMapper_SetMaximumImageSampleDistanceP7_objectS0___ZL78PyvtkUnstructuredGridVolumeRayCastMapper_GetMaximumImageSampleDistanceMinValueP7_objectS0___ZL78PyvtkUnstructuredGridVolumeRayCastMapper_GetMaximumImageSampleDistanceMaxValueP7_objectS0___ZL70PyvtkUnstructuredGridVolumeRayCastMapper_GetMaximumImageSampleDistanceP7_objectS0___ZL69PyvtkUnstructuredGridVolumeRayCastMapper_SetAutoAdjustSampleDistancesP7_objectS0___ZL77PyvtkUnstructuredGridVolumeRayCastMapper_GetAutoAdjustSampleDistancesMinValueP7_objectS0___ZL77PyvtkUnstructuredGridVolumeRayCastMapper_GetAutoAdjustSampleDistancesMaxValueP7_objectS0___ZL69PyvtkUnstructuredGridVolumeRayCastMapper_GetAutoAdjustSampleDistancesP7_objectS0___ZL68PyvtkUnstructuredGridVolumeRayCastMapper_AutoAdjustSampleDistancesOnP7_objectS0___ZL69PyvtkUnstructuredGridVolumeRayCastMapper_AutoAdjustSampleDistancesOffP7_objectS0___ZL59PyvtkUnstructuredGridVolumeRayCastMapper_SetNumberOfThreadsP7_objectS0___ZL59PyvtkUnstructuredGridVolumeRayCastMapper_GetNumberOfThreadsP7_objectS0___ZL72PyvtkUnstructuredGridVolumeRayCastMapper_SetIntermixIntersectingGeometryP7_objectS0___ZL80PyvtkUnstructuredGridVolumeRayCastMapper_GetIntermixIntersectingGeometryMinValueP7_objectS0___ZL80PyvtkUnstructuredGridVolumeRayCastMapper_GetIntermixIntersectingGeometryMaxValueP7_objectS0___ZL72PyvtkUnstructuredGridVolumeRayCastMapper_GetIntermixIntersectingGeometryP7_objectS0___ZL71PyvtkUnstructuredGridVolumeRayCastMapper_IntermixIntersectingGeometryOnP7_objectS0___ZL72PyvtkUnstructuredGridVolumeRayCastMapper_IntermixIntersectingGeometryOffP7_objectS0___ZL59PyvtkUnstructuredGridVolumeRayCastMapper_SetRayCastFunctionP7_objectS0___ZL59PyvtkUnstructuredGridVolumeRayCastMapper_GetRayCastFunctionP7_objectS0___ZL57PyvtkUnstructuredGridVolumeRayCastMapper_SetRayIntegratorP7_objectS0___ZL57PyvtkUnstructuredGridVolumeRayCastMapper_GetRayIntegratorP7_objectS0___ZL47PyvtkUnstructuredGridVolumeRayCastMapper_RenderP7_objectS0___ZL65PyvtkUnstructuredGridVolumeRayCastMapper_ReleaseGraphicsResourcesP7_objectS0___ZL58PyvtkUnstructuredGridVolumeRayCastMapper_GetImageInUseSizeP7_objectS0___ZL55PyvtkUnstructuredGridVolumeRayCastMapper_GetImageOriginP7_objectS0___ZL61PyvtkUnstructuredGridVolumeRayCastMapper_GetImageViewportSizeP7_objectS0___ZL49PyvtkUnstructuredGridVolumeRayCastMapper_CastRaysP7_objectS0___ZL49PyvtkUnstructuredGridVolumeRayIntegrator_IsTypeOfP7_objectS0___ZL44PyvtkUnstructuredGridVolumeRayIntegrator_IsAP7_objectS0___ZL53PyvtkUnstructuredGridVolumeRayIntegrator_SafeDownCastP7_objectS0___ZL52PyvtkUnstructuredGridVolumeRayIntegrator_NewInstanceP7_objectS0___ZL51PyvtkUnstructuredGridVolumeRayIntegrator_InitializeP7_objectS0___ZL50PyvtkUnstructuredGridVolumeRayIntegrator_IntegrateP7_objectS0___ZL49PyvtkUnstructuredGridVolumeZSweepMapper_StaticNewv__ZL48Pyvtk
UnstructuredGridVolumeZSweepMapper_IsTypeOfP7_objectS0___ZL43PyvtkUnstructuredGridVolumeZSweepMapper_IsAP7_objectS0___ZL52PyvtkUnstructuredGridVolumeZSweepMapper_SafeDownCastP7_objectS0___ZL51PyvtkUnstructuredGridVolumeZSweepMapper_NewInstanceP7_objectS0___ZL62PyvtkUnstructuredGridVolumeZSweepMapper_SetImageSampleDistanceP7_objectS0___ZL70PyvtkUnstructuredGridVolumeZSweepMapper_GetImageSampleDistanceMinValueP7_objectS0___ZL70PyvtkUnstructuredGridVolumeZSweepMapper_GetImageSampleDistanceMaxValueP7_objectS0___ZL62PyvtkUnstructuredGridVolumeZSweepMapper_GetImageSampleDistanceP7_objectS0___ZL69PyvtkUnstructuredGridVolumeZSweepMapper_SetMinimumImageSampleDistanceP7_objectS0___ZL77PyvtkUnstructuredGridVolumeZSweepMapper_GetMinimumImageSampleDistanceMinValueP7_objectS0___ZL77PyvtkUnstructuredGridVolumeZSweepMapper_GetMinimumImageSampleDistanceMaxValueP7_objectS0___ZL69PyvtkUnstructuredGridVolumeZSweepMapper_GetMinimumImageSampleDistanceP7_objectS0___ZL69PyvtkUnstructuredGridVolumeZSweepMapper_SetMaximumImageSampleDistanceP7_objectS0___ZL77PyvtkUnstructuredGridVolumeZSweepMapper_GetMaximumImageSampleDistanceMinValueP7_objectS0___ZL77PyvtkUnstructuredGridVolumeZSweepMapper_GetMaximumImageSampleDistanceMaxValueP7_objectS0___ZL69PyvtkUnstructuredGridVolumeZSweepMapper_GetMaximumImageSampleDistanceP7_objectS0___ZL68PyvtkUnstructuredGridVolumeZSweepMapper_SetAutoAdjustSampleDistancesP7_objectS0___ZL76PyvtkUnstructuredGridVolumeZSweepMapper_GetAutoAdjustSampleDistancesMinValueP7_objectS0___ZL76PyvtkUnstructuredGridVolumeZSweepMapper_GetAutoAdjustSampleDistancesMaxValueP7_objectS0___ZL68PyvtkUnstructuredGridVolumeZSweepMapper_GetAutoAdjustSampleDistancesP7_objectS0___ZL67PyvtkUnstructuredGridVolumeZSweepMapper_AutoAdjustSampleDistancesOnP7_objectS0___ZL68PyvtkUnstructuredGridVolumeZSweepMapper_AutoAdjustSampleDistancesOffP7_objectS0___ZL71PyvtkUnstructuredGridVolumeZSweepMapper_SetIntermixIntersectingGeometryP7_objectS0___ZL79PyvtkUnstructuredGridVolumeZSweepMapper_GetIntermixIntersectingGeometryMinValueP7_objectS0___ZL79PyvtkUnstructuredGridVolumeZSweepMapper_GetIntermixIntersectingGeometryMaxValueP7_objectS0___ZL71PyvtkUnstructuredGridVolumeZSweepMapper_GetIntermixIntersectingGeometryP7_objectS0___ZL70PyvtkUnstructuredGridVolumeZSweepMapper_IntermixIntersectingGeometryOnP7_objectS0___ZL71PyvtkUnstructuredGridVolumeZSweepMapper_IntermixIntersectingGeometryOffP7_objectS0___ZL59PyvtkUnstructuredGridVolumeZSweepMapper_GetMaxPixelListSizeP7_objectS0___ZL59PyvtkUnstructuredGridVolumeZSweepMapper_SetMaxPixelListSizeP7_objectS0___ZL56PyvtkUnstructuredGridVolumeZSweepMapper_SetRayIntegratorP7_objectS0___ZL56PyvtkUnstructuredGridVolumeZSweepMapper_GetRayIntegratorP7_objectS0___ZL46PyvtkUnstructuredGridVolumeZSweepMapper_RenderP7_objectS0___ZL57PyvtkUnstructuredGridVolumeZSweepMapper_GetImageInUseSizeP7_objectS0___ZL54PyvtkUnstructuredGridVolumeZSweepMapper_GetImageOriginP7_objectS0___ZL60PyvtkUnstructuredGridVolumeZSweepMapper_GetImageViewportSizeP7_objectS0___ZL26PyvtkVolumeMapper_IsTypeOfP7_objectS0___ZL21PyvtkVolumeMapper_IsAP7_objectS0___ZL30PyvtkVolumeMapper_SafeDownCastP7_objectS0___ZL29PyvtkVolumeMapper_NewInstanceP7_objectS0___ZL30PyvtkVolumeMapper_SetInputDataP7_objectS0___ZL26PyvtkVolumeMapper_GetInputP7_objectS0___ZL30PyvtkVolumeMapper_SetBlendModeP7_objectS0___ZL41PyvtkVolumeMapper_SetBlendModeToCompositeP7_objectS0___ZL48PyvtkVolumeMapper_SetBlendModeToMaximumIntensityP7_objectS0___ZL48PyvtkVolumeMapper_SetBlendModeToMinimumIntensityP7_objectS0___ZL48PyvtkVolumeMapper_SetBlendModeToAverageIntensityP7
_objectS0___ZL40PyvtkVolumeMapper_SetBlendModeToAdditiveP7_objectS0___ZL30PyvtkVolumeMapper_GetBlendModeP7_objectS0___ZL41PyvtkVolumeMapper_SetAverageIPScalarRangeP7_objectS0___ZL41PyvtkVolumeMapper_GetAverageIPScalarRangeP7_objectS0___ZL29PyvtkVolumeMapper_SetCroppingP7_objectS0___ZL37PyvtkVolumeMapper_GetCroppingMinValueP7_objectS0___ZL37PyvtkVolumeMapper_GetCroppingMaxValueP7_objectS0___ZL29PyvtkVolumeMapper_GetCroppingP7_objectS0___ZL28PyvtkVolumeMapper_CroppingOnP7_objectS0___ZL29PyvtkVolumeMapper_CroppingOffP7_objectS0___ZL41PyvtkVolumeMapper_SetCroppingRegionPlanesP7_objectS0___ZL41PyvtkVolumeMapper_GetCroppingRegionPlanesP7_objectS0___ZL46PyvtkVolumeMapper_GetVoxelCroppingRegionPlanesP7_objectS0___ZL40PyvtkVolumeMapper_SetCroppingRegionFlagsP7_objectS0___ZL48PyvtkVolumeMapper_GetCroppingRegionFlagsMinValueP7_objectS0___ZL48PyvtkVolumeMapper_GetCroppingRegionFlagsMaxValueP7_objectS0___ZL40PyvtkVolumeMapper_GetCroppingRegionFlagsP7_objectS0___ZL51PyvtkVolumeMapper_SetCroppingRegionFlagsToSubVolumeP7_objectS0___ZL47PyvtkVolumeMapper_SetCroppingRegionFlagsToFenceP7_objectS0___ZL55PyvtkVolumeMapper_SetCroppingRegionFlagsToInvertedFenceP7_objectS0___ZL47PyvtkVolumeMapper_SetCroppingRegionFlagsToCrossP7_objectS0___ZL55PyvtkVolumeMapper_SetCroppingRegionFlagsToInvertedCrossP7_objectS0___ZL24PyvtkVolumeMapper_RenderP7_objectS0___ZL42PyvtkVolumeMapper_ReleaseGraphicsResourcesP7_objectS0___ZL33PyvtkVolumeMapper_SetInputData_s1P7_objectS0___ZL33PyvtkVolumeMapper_SetInputData_s2P7_objectS0___ZL34PyvtkVolumeOutlineSource_StaticNewv__ZL33PyvtkVolumeOutlineSource_IsTypeOfP7_objectS0___ZL28PyvtkVolumeOutlineSource_IsAP7_objectS0___ZL37PyvtkVolumeOutlineSource_SafeDownCastP7_objectS0___ZL36PyvtkVolumeOutlineSource_NewInstanceP7_objectS0___ZL40PyvtkVolumeOutlineSource_SetVolumeMapperP7_objectS0___ZL40PyvtkVolumeOutlineSource_GetVolumeMapperP7_objectS0___ZL43PyvtkVolumeOutlineSource_SetGenerateScalarsP7_objectS0___ZL42PyvtkVolumeOutlineSource_GenerateScalarsOnP7_objectS0___ZL43PyvtkVolumeOutlineSource_GenerateScalarsOffP7_objectS0___ZL43PyvtkVolumeOutlineSource_GetGenerateScalarsP7_objectS0___ZL43PyvtkVolumeOutlineSource_SetGenerateOutlineP7_objectS0___ZL42PyvtkVolumeOutlineSource_GenerateOutlineOnP7_objectS0___ZL43PyvtkVolumeOutlineSource_GenerateOutlineOffP7_objectS0___ZL43PyvtkVolumeOutlineSource_GetGenerateOutlineP7_objectS0___ZL41PyvtkVolumeOutlineSource_SetGenerateFacesP7_objectS0___ZL40PyvtkVolumeOutlineSource_GenerateFacesOnP7_objectS0___ZL41PyvtkVolumeOutlineSource_GenerateFacesOffP7_objectS0___ZL41PyvtkVolumeOutlineSource_GetGenerateFacesP7_objectS0___ZL33PyvtkVolumeOutlineSource_SetColorP7_objectS0___ZL33PyvtkVolumeOutlineSource_GetColorP7_objectS0___ZL41PyvtkVolumeOutlineSource_SetActivePlaneIdP7_objectS0___ZL41PyvtkVolumeOutlineSource_GetActivePlaneIdP7_objectS0___ZL44PyvtkVolumeOutlineSource_SetActivePlaneColorP7_objectS0___ZL44PyvtkVolumeOutlineSource_GetActivePlaneColorP7_objectS0___GLOBAL__sub_I_vtkVolumeOutlineSourcePython.cxx__ZL27PyvtkVolumePicker_StaticNewv__ZL26PyvtkVolumePicker_IsTypeOfP7_objectS0___ZL21PyvtkVolumePicker_IsAP7_objectS0___ZL30PyvtkVolumePicker_SafeDownCastP7_objectS0___ZL29PyvtkVolumePicker_NewInstanceP7_objectS0___ZL39PyvtkVolumePicker_SetPickCroppingPlanesP7_objectS0___ZL38PyvtkVolumePicker_PickCroppingPlanesOnP7_objectS0___ZL39PyvtkVolumePicker_PickCroppingPlanesOffP7_objectS0___ZL39PyvtkVolumePicker_GetPickCroppingPlanesP7_objectS0___ZL36PyvtkVolumePicker_GetCroppingPlaneIdP7_objectS0___ZN13vtkCellPicker8IsTypeOfEPKc__ZL51PyvtkVolumeRayCastSpaceLeapingImageFil
ter_StaticNewv__ZL50PyvtkVolumeRayCastSpaceLeapingImageFilter_IsTypeOfP7_objectS0___ZL45PyvtkVolumeRayCastSpaceLeapingImageFilter_IsAP7_objectS0___ZL54PyvtkVolumeRayCastSpaceLeapingImageFilter_SafeDownCastP7_objectS0___ZL53PyvtkVolumeRayCastSpaceLeapingImageFilter_NewInstanceP7_objectS0___ZL59PyvtkVolumeRayCastSpaceLeapingImageFilter_SetCurrentScalarsP7_objectS0___ZL59PyvtkVolumeRayCastSpaceLeapingImageFilter_GetCurrentScalarsP7_objectS0___ZL66PyvtkVolumeRayCastSpaceLeapingImageFilter_SetIndependentComponentsP7_objectS0___ZL66PyvtkVolumeRayCastSpaceLeapingImageFilter_GetIndependentComponentsP7_objectS0___ZL67PyvtkVolumeRayCastSpaceLeapingImageFilter_SetComputeGradientOpacityP7_objectS0___ZL67PyvtkVolumeRayCastSpaceLeapingImageFilter_GetComputeGradientOpacityP7_objectS0___ZL66PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeGradientOpacityOnP7_objectS0___ZL67PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeGradientOpacityOffP7_objectS0___ZL58PyvtkVolumeRayCastSpaceLeapingImageFilter_SetComputeMinMaxP7_objectS0___ZL58PyvtkVolumeRayCastSpaceLeapingImageFilter_GetComputeMinMaxP7_objectS0___ZL57PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeMinMaxOnP7_objectS0___ZL58PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeMinMaxOffP7_objectS0___ZL71PyvtkVolumeRayCastSpaceLeapingImageFilter_SetUpdateGradientOpacityFlagsP7_objectS0___ZL71PyvtkVolumeRayCastSpaceLeapingImageFilter_GetUpdateGradientOpacityFlagsP7_objectS0___ZL70PyvtkVolumeRayCastSpaceLeapingImageFilter_UpdateGradientOpacityFlagsOnP7_objectS0___ZL71PyvtkVolumeRayCastSpaceLeapingImageFilter_UpdateGradientOpacityFlagsOffP7_objectS0___ZL64PyvtkVolumeRayCastSpaceLeapingImageFilter_GetLastMinMaxBuildTimeP7_objectS0___ZL63PyvtkVolumeRayCastSpaceLeapingImageFilter_GetLastMinMaxFlagTimeP7_objectS0___ZL55PyvtkVolumeRayCastSpaceLeapingImageFilter_SetTableShiftP7_objectS0___ZL55PyvtkVolumeRayCastSpaceLeapingImageFilter_GetTableShiftP7_objectS0___ZL55PyvtkVolumeRayCastSpaceLeapingImageFilter_SetTableScaleP7_objectS0___ZL55PyvtkVolumeRayCastSpaceLeapingImageFilter_GetTableScaleP7_objectS0___ZL54PyvtkVolumeRayCastSpaceLeapingImageFilter_SetTableSizeP7_objectS0___ZL54PyvtkVolumeRayCastSpaceLeapingImageFilter_GetTableSizeP7_objectS0___ZL74PyvtkVolumeRayCastSpaceLeapingImageFilter_GetNumberOfIndependentComponentsP7_objectS0___ZL57PyvtkVolumeRayCastSpaceLeapingImageFilter_GetMinMaxVolumeP7_objectS0___ZL50PyvtkVolumeRayCastSpaceLeapingImageFilter_SetCacheP7_objectS0___ZL70PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeInputExtentsForOutputP7_objectS0___ZL66PyvtkVolumeRayCastSpaceLeapingImageFilter_GetMinNonZeroScalarIndexP7_objectS0___ZL77PyvtkVolumeRayCastSpaceLeapingImageFilter_GetMinNonZeroGradientMagnitudeIndexP7_objectS0___ZL63PyvtkVolumeRayCastSpaceLeapingImageFilter_SetScalarOpacityTableP7_objectS0___ZL65PyvtkVolumeRayCastSpaceLeapingImageFilter_SetGradientOpacityTableP7_objectS0___ZL55PyvtkVolumeRayCastSpaceLeapingImageFilter_ComputeOffsetP7_objectS0___ZN39vtkVolumeRayCastSpaceLeapingImageFilter8IsTypeOfEPKcGCC_except_table48GCC_except_table49GCC_except_table53GCC_except_table54GCC_except_table75GCC_except_table80GCC_except_table17GCC_except_table18GCC_except_table37GCC_except_table38__dyld_private__ZL26PyvtkDirectionEncoder_Type__ZL29PyvtkDirectionEncoder_Methods__ZL34PyvtkEncodedGradientEstimator_Type__ZL37PyvtkEncodedGradientEstimator_Methods__ZL31PyvtkEncodedGradientShader_Type__ZL34PyvtkEncodedGradientShader_Methods__ZL43PyvtkFiniteDifferenceGradientEstimator_Type__ZL46PyvtkFiniteDifferenceGradientEstimator_Methods__ZL32PyvtkFixedPointRayCastIm
age_Type__ZL35PyvtkFixedPointRayCastImage_Methods__ZL50PyvtkFixedPointVolumeRayCastCompositeGOHelper_Type__ZL53PyvtkFixedPointVolumeRayCastCompositeGOHelper_Methods__ZL55PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_Type__ZL58PyvtkFixedPointVolumeRayCastCompositeGOShadeHelper_Methods__ZL48PyvtkFixedPointVolumeRayCastCompositeHelper_Type__ZL51PyvtkFixedPointVolumeRayCastCompositeHelper_Methods__ZL53PyvtkFixedPointVolumeRayCastCompositeShadeHelper_Type__ZL56PyvtkFixedPointVolumeRayCastCompositeShadeHelper_Methods__ZL39PyvtkFixedPointVolumeRayCastHelper_Type__ZL42PyvtkFixedPointVolumeRayCastHelper_Methods__ZL42PyvtkFixedPointVolumeRayCastMIPHelper_Type__ZL45PyvtkFixedPointVolumeRayCastMIPHelper_Methods__ZL39PyvtkFixedPointVolumeRayCastMapper_Type__ZL42PyvtkFixedPointVolumeRayCastMapper_Methods__ZL44PyvtkGPUVolumeRayCastMapper_TFRangeType_Type__ZL32PyvtkGPUVolumeRayCastMapper_Type__ZL35PyvtkGPUVolumeRayCastMapper_Methods__ZL31PyvtkOSPRayVolumeInterface_Type__ZL34PyvtkOSPRayVolumeInterface_Methods__ZL35PyvtkProjectedTetrahedraMapper_Type__ZL38PyvtkProjectedTetrahedraMapper_Methods__ZL35PyvtkRayCastImageDisplayHelper_Type__ZL38PyvtkRayCastImageDisplayHelper_Methods__ZL52PyvtkRayCastImageDisplayHelper_RenderTexture_Methods__ZL41PyvtkRecursiveSphereDirectionEncoder_Type__ZL44PyvtkRecursiveSphereDirectionEncoder_Methods__ZL35PyvtkSphericalDirectionEncoder_Type__ZL38PyvtkSphericalDirectionEncoder_Methods__ZL46PyvtkUnstructuredGridBunykRayCastFunction_Type__ZL49PyvtkUnstructuredGridBunykRayCastFunction_Methods__ZL50PyvtkUnstructuredGridHomogeneousRayIntegrator_Type__ZL53PyvtkUnstructuredGridHomogeneousRayIntegrator_Methods__ZL45PyvtkUnstructuredGridLinearRayIntegrator_Type__ZL48PyvtkUnstructuredGridLinearRayIntegrator_Methods__ZL61PyvtkUnstructuredGridLinearRayIntegrator_IntegrateRay_Methods__ZL47PyvtkUnstructuredGridPartialPreIntegration_Type__ZL50PyvtkUnstructuredGridPartialPreIntegration_Methods__ZL63PyvtkUnstructuredGridPartialPreIntegration_IntegrateRay_Methods__ZL40PyvtkUnstructuredGridPreIntegration_Type__ZL43PyvtkUnstructuredGridPreIntegration_Methods__ZL38PyvtkUnstructuredGridVolumeMapper_Type__ZL41PyvtkUnstructuredGridVolumeMapper_Methods__ZL54PyvtkUnstructuredGridVolumeMapper_SetInputData_Methods__ZL47PyvtkUnstructuredGridVolumeRayCastFunction_Type__ZL50PyvtkUnstructuredGridVolumeRayCastFunction_Methods__ZL47PyvtkUnstructuredGridVolumeRayCastIterator_Type__ZL50PyvtkUnstructuredGridVolumeRayCastIterator_Methods__ZL45PyvtkUnstructuredGridVolumeRayCastMapper_Type__ZL48PyvtkUnstructuredGridVolumeRayCastMapper_Methods__ZL45PyvtkUnstructuredGridVolumeRayIntegrator_Type__ZL48PyvtkUnstructuredGridVolumeRayIntegrator_Methods__ZL44PyvtkUnstructuredGridVolumeZSweepMapper_Type__ZL47PyvtkUnstructuredGridVolumeZSweepMapper_Methods__ZL33PyvtkVolumeMapper_BlendModes_Type__ZL22PyvtkVolumeMapper_Type__ZL25PyvtkVolumeMapper_Methods__ZL38PyvtkVolumeMapper_SetInputData_Methods__ZL32PyvtkVolumeOutlineSource_Methods__ZL22PyvtkVolumePicker_Type__ZL25PyvtkVolumePicker_Methods__ZL46PyvtkVolumeRayCastSpaceLeapingImageFilter_Type__ZL49PyvtkVolumeRayCastSpaceLeapingImageFilter_Methods__ZL33PyvtkRenderingVolumePython_Module__ZL28vtkDebugLeaksManagerInstance__ZL39vtkObjectFactoryRegistryCleanupInstance__ZL29PyvtkVolumeOutlineSource_Type__ZL34PyvtkRenderingVolumePython_Methods
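To make that wrapper surface concrete, the sketch below shows how the classes whose Pyvtk* entry points appear above are typically driven from Python. This is a minimal illustration, not code recovered from the dylib: it assumes the standard `vtk` package layout exposes these wrapped classes, and the synthetic vtkRTAnalyticSource input and the specific transfer-function values are hypothetical; the mapper methods used (SetSampleDistance, SetBlendModeToComposite) do correspond to symbols listed in the table.

```python
# Minimal sketch of the Python API whose wrapper symbols appear in this dylib.
# Assumes the wrapped modules are importable through the standard `vtk` package.
import vtk

# Hypothetical 3-D scalar field to serve as the volume input.
source = vtk.vtkRTAnalyticSource()
source.Update()

# vtkFixedPointVolumeRayCastMapper is wrapped by the PyvtkFixedPointVolumeRayCastMapper_* symbols.
mapper = vtk.vtkFixedPointVolumeRayCastMapper()
mapper.SetInputConnection(source.GetOutputPort())
mapper.SetSampleDistance(0.5)          # symbol: ..._SetSampleDistance
mapper.SetBlendModeToComposite()       # symbol: PyvtkVolumeMapper_SetBlendModeToComposite

# Transfer functions and the vtkVolume prop expected by the
# Render(vtkRenderer*, vtkVolume*) entry points in the symbol table.
color = vtk.vtkColorTransferFunction()
color.AddRGBPoint(0.0, 0.0, 0.0, 0.0)
color.AddRGBPoint(255.0, 1.0, 1.0, 1.0)
opacity = vtk.vtkPiecewiseFunction()
opacity.AddPoint(0.0, 0.0)
opacity.AddPoint(255.0, 0.2)

prop = vtk.vtkVolumeProperty()
prop.SetColor(color)
prop.SetScalarOpacity(opacity)

volume = vtk.vtkVolume()
volume.SetMapper(mapper)
volume.SetProperty(prop)

renderer = vtk.vtkRenderer()
renderer.AddVolume(volume)
window = vtk.vtkRenderWindow()
window.AddRenderer(renderer)
window.Render()
```

The same pattern applies to the other wrapped mappers (vtkGPUVolumeRayCastMapper, vtkUnstructuredGridVolumeRayCastMapper, vtkUnstructuredGridVolumeZSweepMapper): each is a drop-in replacement for the mapper object above, with the class-specific setters visible in its symbol block.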