#ifndef vtkVolumeShaderComposer_h
#define vtkVolumeShaderComposer_h
  for (auto& item : inputs)

  for (auto& item : inputs)

    const bool lighting = volProp->GetShade() == 1;

bool UseClippedVoxelIntensity(

  for (auto& item : inputs)

    const bool useClippedVoxelIntensity =

    if (useClippedVoxelIntensity)

  const std::string base = arrayName.substr(0, arrayName.length() - 3);
94 " //Transform vertex (data coordinates) to clip coordinates\n"
95 " // p_clip = T_ProjViewModel * T_dataToWorld * p_data\n"
96 " vec4 pos = in_projectionMatrix * in_modelViewMatrix * in_volumeMatrix[0] *\n"
97 " vec4(in_vertexPos.xyz, 1.0);\n"
98 " gl_Position = pos;\n");
107 " // Transform vertex (data coordinates) to texture coordinates.\n"
108 " // p_texture = T_dataToTex * p_data\n"
109 " vec3 uvx = sign(in_cellSpacing[0]) * (in_inverseTextureDatasetMatrix[0] *\n"
110 " vec4(in_vertexPos, 1.0)).xyz;\n"
112 " // For point dataset, we offset the texture coordinate\n"
113 " // to account for OpenGL treating voxel at the center of the cell.\n"
114 " // Transform cell tex-coordinates to point tex-coordinates (cellToPoint\n"
115 " // is an identity matrix in the case of cell data).\n"
116 " ip_textureCoords = (in_cellToPoint[0] * vec4(uvx, 1.0)).xyz;\n"
117 " ip_inverseTextureDataAdjusted = in_cellToPoint[0] * in_inverseTextureDatasetMatrix[0];\n");
  const int numInputs = gpuMapper->GetInputCount();

  std::ostringstream ss;
  ss <<
    "uniform vec3 in_cellSpacing[" << numInputs << "];\n"
    "uniform mat4 in_modelViewMatrix;\n"
    "uniform mat4 in_projectionMatrix;\n";

  const int numTransf = multipleInputs ? numInputs + 1 : 1;
  ss <<
    "uniform mat4 in_volumeMatrix[" << numTransf << "];\n"
    "uniform mat4 in_inverseTextureDatasetMatrix[" << numTransf << "];\n"
    "uniform mat4 in_cellToPoint[" << numTransf << "];\n"
    "// This variable could be 'invariant varying' but it is declared\n"
    "// as 'varying' to avoid compiler compatibility issues.\n"
    "out mat4 ip_inverseTextureDataAdjusted;\n";
  int vtkNotUsed(numberOfLights),
  int lightingComplexity,
  int independentComponents)

  const int numInputs = static_cast<int>(inputs.size());

  std::ostringstream toShaderStr;
  toShaderStr <<
    "uniform sampler3D in_volume[" << numInputs << "];\n";

  toShaderStr <<
    "uniform vec4 in_volume_scale[" << numInputs << "];\n"
    "uniform vec4 in_volume_bias[" << numInputs << "];\n";

  toShaderStr <<
    "uniform int in_noOfComponents;\n"
    "uniform int in_independentComponents;\n"
    "uniform sampler2D in_noiseSampler;\n"
    "uniform sampler2D in_depthSampler;\n"
    "// Camera position\n"
    "uniform vec3 in_cameraPos;\n";

  const int numTransf = (numInputs > 1) ? numInputs + 1 : 1;
  toShaderStr <<
    "uniform mat4 in_volumeMatrix[" << numTransf << "];\n"
    "uniform mat4 in_inverseVolumeMatrix[" << numTransf << "];\n"
    "uniform mat4 in_textureDatasetMatrix[" << numTransf << "];\n"
    "uniform mat4 in_inverseTextureDatasetMatrix[" << numTransf << "];\n"
    "uniform mat4 in_textureToEye[" << numTransf << "];\n"
    "uniform vec3 in_texMin[" << numTransf << "];\n"
    "uniform vec3 in_texMax[" << numTransf << "];\n"
    "uniform mat4 in_cellToPoint[" << numTransf << "];\n";

  toShaderStr <<
    "// view and model matrices\n"
    "uniform mat4 in_projectionMatrix;\n"
    "uniform mat4 in_inverseProjectionMatrix;\n"
    "uniform mat4 in_modelViewMatrix;\n"
    "uniform mat4 in_inverseModelViewMatrix;\n"
    "in mat4 ip_inverseTextureDataAdjusted;\n"
    "uniform vec3 in_cellStep[" << numInputs << "];\n";

  toShaderStr <<
    "uniform vec2 in_scalarsRange[" << numInputs * 4 << "];\n"
    "uniform vec3 in_cellSpacing[" << numInputs << "];\n"
    "// Sample distance\n"
    "uniform float in_sampleDistance;\n"
    "uniform vec2 in_windowLowerLeftCorner;\n"
    "uniform vec2 in_inverseOriginalWindowSize;\n"
    "uniform vec2 in_inverseWindowSize;\n"
    "uniform vec3 in_textureExtentsMax;\n"
    "uniform vec3 in_textureExtentsMin;\n"
    "// Material and lighting\n"
    "uniform vec3 in_diffuse[4];\n"
    "uniform vec3 in_ambient[4];\n"
    "uniform vec3 in_specular[4];\n"
    "uniform float in_shininess[4];\n"
    "uniform bool in_useJittering;\n"
    "vec3 g_rayJitter = vec3(0.0);\n"
    "uniform vec2 in_averageIPRange;\n";
  const bool hasGradientOpacity = HasGradientOpacity(inputs);
  if (lightingComplexity > 0 || hasGradientOpacity)
    toShaderStr <<
      "uniform bool in_twoSidedLighting;\n";

  if (lightingComplexity == 3)
    toShaderStr <<
      "vec4 g_fragWorldPos;\n"
      "uniform int in_numberOfLights;\n"
      "uniform vec3 in_lightAmbientColor[6];\n"
      "uniform vec3 in_lightDiffuseColor[6];\n"
      "uniform vec3 in_lightSpecularColor[6];\n"
      "uniform vec3 in_lightDirection[6];\n"
      "uniform vec3 in_lightPosition[6];\n"
      "uniform vec3 in_lightAttenuation[6];\n"
      "uniform float in_lightConeAngle[6];\n"
      "uniform float in_lightExponent[6];\n"
      "uniform int in_lightPositional[6];\n";
  else if (lightingComplexity == 2)
    toShaderStr <<
      "vec4 g_fragWorldPos;\n"
      "uniform int in_numberOfLights;\n"
      "uniform vec3 in_lightAmbientColor[6];\n"
      "uniform vec3 in_lightDiffuseColor[6];\n"
      "uniform vec3 in_lightSpecularColor[6];\n"
      "uniform vec3 in_lightDirection[6];\n";

      "uniform vec3 in_lightAmbientColor[1];\n"
      "uniform vec3 in_lightDiffuseColor[1];\n"
      "uniform vec3 in_lightSpecularColor[1];\n"
      "vec4 g_lightPosObj;\n"

  if (noOfComponents > 1 && independentComponents)
    toShaderStr <<
      "uniform vec4 in_componentWeight;\n";

    "uniform sampler2D in_depthPassSampler;\n";

    "#if NUMBER_OF_CONTOURS\n"
    "uniform float in_isosurfacesValues[NUMBER_OF_CONTOURS];\n"
    "\n"
    "int findIsoSurfaceIndex(float scalar, float array[NUMBER_OF_CONTOURS+2])\n"
    "{\n"
    "  int index = NUMBER_OF_CONTOURS >> 1;\n"
    "  while (scalar > array[index]) ++index;\n"
    "  while (scalar < array[index]) --index;\n"

  return toShaderStr.str();
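Each of these composer functions only returns a string; the mapper splices the generated block into its shader template at a placeholder tag (the //VTK::Base::Dec style tags). A minimal sketch of that splicing under those assumptions; ReplaceTag is a hypothetical helper written here for illustration, not the VTK API:

// Hypothetical helper, illustration only: replace the first occurrence of a
// placeholder tag in a shader source with a generated code block.
#include <string>
static void ReplaceTag(std::string& shaderSource, const std::string& tag, const std::string& code)
{
  const std::size_t pos = shaderSource.find(tag);
  if (pos != std::string::npos)
  {
    shaderSource.replace(pos, tag.size(), code);
  }
}
// e.g. ReplaceTag(fragmentSource, "//VTK::Base::Dec", toShaderStr.str());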
  int lightingComplexity)

  vtkVolume* vol = inputs.begin()->second.Volume;

      \n  vec2 fragTexCoord2 = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
      \n                       in_inverseWindowSize;\
      \n  vec4 depthValue = texture2D(in_depthPassSampler, fragTexCoord2);\
      \n  vec4 rayOrigin = WindowToNDC(gl_FragCoord.x, gl_FragCoord.y, depthValue.x);\

      \n  // From normalized device coordinates to eye coordinates.\
      \n  // in_projectionMatrix is inversed because of way VT\
      \n  // From eye coordinates to texture coordinates\
      \n  rayOrigin = in_inverseTextureDatasetMatrix[0] *\
      \n              in_inverseVolumeMatrix[0] *\
      \n              in_inverseModelViewMatrix *\
      \n              in_inverseProjectionMatrix *\
      \n              rayOrigin;\
      \n  rayOrigin /= rayOrigin.w;\
      \n  g_rayOrigin = rayOrigin.xyz;"

      \n  // Get the 3D texture coordinates for lookup into the in_volume dataset\
      \n  g_rayOrigin = ip_textureCoords.xyz;"
      \n  // Eye position in dataset space\
      \n  g_eyePosObj = in_inverseVolumeMatrix[0] * vec4(in_cameraPos, 1.0);\

      \n  // Getting the ray marching direction (in dataset space)\
      \n  vec3 rayDir = computeRayDirection();\

      \n  // Multiply the raymarching direction with the step size to get the\
      \n  // sub-step size we need to take at each raymarching step\
      \n  g_dirStep = (ip_inverseTextureDataAdjusted *\
      \n               vec4(rayDir, 0.0)).xyz * in_sampleDistance;\

      \n  // 2D Texture fragment coordinates [0,1] from fragment coordinates.\
      \n  // The frame buffer texture has the size of the plain buffer but\
      \n  // we use a fraction of it. The texture coordinate is less than 1 if\
      \n  // the reduction factor is less than 1.\
      \n  // Device coordinates are between -1 and 1. We need texture\
      \n  // coordinates between 0 and 1. The in_depthSampler\
      \n  // buffer has the original size buffer.\
      \n  vec2 fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
      \n                      in_inverseWindowSize;\

      \n  if (in_useJittering)\
      \n    float jitterValue = texture2D(in_noiseSampler, gl_FragCoord.xy / textureSize(in_noiseSampler, 0)).x;\
      \n    g_rayJitter = g_dirStep * jitterValue;\

      \n    g_rayJitter = g_dirStep;\

      \n  g_rayOrigin += g_rayJitter;\

      \n  // Flag to determine if the voxel should be considered for the rendering\
      \n  g_skip = false;");
      \n  // Light position in dataset space\
      \n  g_lightPosObj = (in_inverseVolumeMatrix[0] *\
      \n                   vec4(in_cameraPos, 1.0));\
      \n  g_ldir = normalize(g_lightPosObj.xyz - ip_vertexPos);\
      \n  g_vdir = normalize(g_eyePosObj.xyz - ip_vertexPos);\
      \n  g_h = normalize(g_ldir + g_vdir);"
  int noOfComponents,
  int independentComponents,
  std::map<int, std::string> gradientTableMap)

  std::ostringstream ss;
  ss << "uniform sampler2D " << ArrayBaseName(gradientTableMap[0])
     << "[" << noOfComponents << "];\n";

      (noOfComponents == 1 || !independentComponents))

        \nfloat computeGradientOpacity(vec4 grad)\
        \n  return texture2D(" + gradientTableMap[0] + ", vec2(grad.w, 0.0)).r;\

  else if (noOfComponents > 1 && independentComponents &&

        \nfloat computeGradientOpacity(vec4 grad, int component)\

    for (int i = 0; i < noOfComponents; ++i)

      std::ostringstream toString;

        \n  if (component == " + toString.str() + ")");

        \n    return texture2D(" + gradientTableMap[i] + ", vec2(grad.w, 0.0)).r;\
  const bool hasLighting = HasLighting(inputs);
  const bool hasGradientOp = HasGradientOpacity(inputs);

  if (hasLighting || hasGradientOp)

      "// c is short for component\n"
      "vec4 computeGradient(in vec3 texPos, in int c, in sampler3D volume, in int index)\n"
      "  // Approximate Nabla(F) derivatives with central differences.\n"
      "  vec3 g1; // F_front\n"
      "  vec3 g2; // F_back\n"
      "  vec3 xvec = vec3(in_cellStep[index].x, 0.0, 0.0);\n"
      "  vec3 yvec = vec3(0.0, in_cellStep[index].y, 0.0);\n"
      "  vec3 zvec = vec3(0.0, 0.0, in_cellStep[index].z);\n"
      "  vec3 texPosPvec[3];\n"
      "  texPosPvec[0] = texPos + xvec;\n"
      "  texPosPvec[1] = texPos + yvec;\n"
      "  texPosPvec[2] = texPos + zvec;\n"
      "  vec3 texPosNvec[3];\n"
      "  texPosNvec[0] = texPos - xvec;\n"
      "  texPosNvec[1] = texPos - yvec;\n"
      "  texPosNvec[2] = texPos - zvec;\n"
      "  g1.x = texture3D(volume, vec3(texPosPvec[0]))[c];\n"
      "  g1.y = texture3D(volume, vec3(texPosPvec[1]))[c];\n"
      "  g1.z = texture3D(volume, vec3(texPosPvec[2]))[c];\n"
      "  g2.x = texture3D(volume, vec3(texPosNvec[0]))[c];\n"
      "  g2.y = texture3D(volume, vec3(texPosNvec[1]))[c];\n"
      "  g2.z = texture3D(volume, vec3(texPosNvec[2]))[c];\n"
507 " vec4 g1ObjDataPos[3], g2ObjDataPos[3];\n"
508 " for (int i = 0; i < 3; ++i)\n"
510 " g1ObjDataPos[i] = clip_texToObjMat * vec4(texPosPvec[i], 1.0);\n"
511 " if (g1ObjDataPos[i].w != 0.0)\n"
513 " g1ObjDataPos[i] /= g1ObjDataPos[i].w;\n"
515 " g2ObjDataPos[i] = clip_texToObjMat * vec4(texPosNvec[i], 1.0);\n"
516 " if (g2ObjDataPos[i].w != 0.0)\n"
518 " g2ObjDataPos[i] /= g2ObjDataPos[i].w;\n"
522 " for (int i = 0; i < clip_numPlanes && !g_skip; i = i + 6)\n"
524 " vec3 planeOrigin = vec3(in_clippingPlanes[i + 1],\n"
525 " in_clippingPlanes[i + 2],\n"
526 " in_clippingPlanes[i + 3]);\n"
527 " vec3 planeNormal = normalize(vec3(in_clippingPlanes[i + 4],\n"
528 " in_clippingPlanes[i + 5],\n"
529 " in_clippingPlanes[i + 6]));\n"
530 " for (int j = 0; j < 3; ++j)\n"
532 " if (dot(vec3(planeOrigin - g1ObjDataPos[j].xyz), planeNormal) > 0)\n"
534 " g1[j] = in_clippedVoxelIntensity;\n"
536 " if (dot(vec3(planeOrigin - g2ObjDataPos[j].xyz), planeNormal) > 0)\n"
538 " g2[j] = in_clippedVoxelIntensity;\n"
545 " // Apply scale and bias to the fetched values.\n"
546 " g1 = g1 * in_volume_scale[index][c] + in_volume_bias[index][c];\n"
547 " g2 = g2 * in_volume_scale[index][c] + in_volume_bias[index][c];\n"
552 " // Central differences: (F_front - F_back) / 2h\n"
553 " // This version of computeGradient() is only used for lighting\n"
554 " // calculations (only direction matters), hence the difference is\n"
555 " // not scaled by 2h and a dummy gradient mag is returned (-1.).\n"
556 " return vec4((g1 - g2) / in_cellSpacing[index], -1.0);\n"
562 " // Scale values the actual scalar range.\n"
563 " float range = in_scalarsRange[c][1] - in_scalarsRange[c][0];\n"
564 " g1 = in_scalarsRange[c][0] + range * g1;\n"
565 " g2 = in_scalarsRange[c][0] + range * g2;\n"
567 " // Central differences: (F_front - F_back) / 2h\n"
570 " float avgSpacing = (in_cellSpacing[index].x +\n"
571 " in_cellSpacing[index].y + in_cellSpacing[index].z) / 3.0;\n"
572 " vec3 aspect = in_cellSpacing[index] * 2.0 / avgSpacing;\n"
574 " float grad_mag = length(g2);\n"
576 " // Handle normalizing with grad_mag == 0.0\n"
577 " g2 = grad_mag > 0.0 ? normalize(g2) : vec3(0.0);\n"
579 " // Since the actual range of the gradient magnitude is unknown,\n"
580 " // assume it is in the range [0, 0.25 * dataRange].\n"
581 " range = range != 0 ? range : 1.0;\n"
582 " grad_mag = grad_mag / (0.25 * range);\n"
583 " grad_mag = clamp(grad_mag, 0.0, 1.0);\n"
585 " return vec4(g2.xyz, grad_mag);\n"
592 "vec4 computeGradient(in vec3 texPos, in int c, in sampler3D volume, in int index)\n"
594 " return vec4(0.0);\n"
  int independentComponents,
  int vtkNotUsed(numberOfLights),
  int lightingComplexity)

    \nvec4 computeLighting(vec4 color, int component)\
    \n  vec4 finalColor = vec4(0.0);"

  int const shadeReqd = volProperty->GetShade() &&

  switch (transferMode)

        "  // Compute gradient function only once\n"
        "  vec4 gradient = computeGradient(g_dataPos, component, in_volume[0], 0);\n");

        "  // TransferFunction2D is enabled so the gradient for\n"
        "  // each component has already been cached\n"
        "  vec4 gradient = g_gradients_0[component];\n");

  if (lightingComplexity == 1)
      \n  vec3 diffuse = vec3(0.0);\
      \n  vec3 specular = vec3(0.0);\
      \n  vec3 normal = gradient.xyz;\
      \n  float normalLength = length(normal);\
      \n  if (normalLength > 0.0)\
      \n    normal = normalize(normal);\

      \n    normal = vec3(0.0, 0.0, 0.0);\

      \n  float nDotL = dot(normal, g_ldir);\
      \n  float nDotH = dot(normal, g_h);\
      \n  if (nDotL < 0.0 && in_twoSidedLighting)\

      \n  if (nDotH < 0.0 && in_twoSidedLighting)\

      \n  diffuse = nDotL * in_diffuse[component] *\
      \n            in_lightDiffuseColor[0] * color.rgb;\

      \n  specular = pow(nDotH, in_shininess[component]) *\
      \n             in_specular[component] *\
      \n             in_lightSpecularColor[0];\
      \n  // For the headlight, ignore the light's ambient color\
      \n  // for now as it is causing the old mapper tests to fail\
      \n  finalColor.xyz = in_ambient[component] * color.rgb +\
      \n                   diffuse + specular;\
  else if (lightingComplexity == 2)

      \n  g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix[0] *\
      \n                   in_textureDatasetMatrix[0] * vec4(-g_dataPos, 1.0);\
      \n  if (g_fragWorldPos.w != 0.0)\
      \n    g_fragWorldPos /= g_fragWorldPos.w;\
      \n  vec3 vdir = normalize(g_fragWorldPos.xyz);\
      \n  vec3 normal = gradient.xyz;\
      \n  vec3 ambient = vec3(0.0);\
      \n  vec3 diffuse = vec3(0.0);\
      \n  vec3 specular = vec3(0.0);\
      \n  float normalLength = length(normal);\
      \n  if (normalLength > 0.0)\
      \n    normal = normalize((in_textureToEye[0] * vec4(normal, 0.0)).xyz);\

      \n    normal = vec3(0.0, 0.0, 0.0);\

      \n  for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
      \n    vec3 ldir = in_lightDirection[lightNum].xyz;\
      \n    vec3 h = normalize(ldir + vdir);\
      \n    float nDotH = dot(normal, h);\
      \n    if (nDotH < 0.0 && in_twoSidedLighting)\

      \n    float nDotL = dot(normal, ldir);\
      \n    if (nDotL < 0.0 && in_twoSidedLighting)\

      \n    diffuse += in_lightDiffuseColor[lightNum] * nDotL;\

      \n    specular = in_lightSpecularColor[lightNum] *\
      \n               pow(nDotH, in_shininess[component]);\

      \n    ambient += in_lightAmbientColor[lightNum];\

      \n  finalColor.xyz = in_ambient[component] * ambient +\
      \n                   in_diffuse[component] * diffuse * color.rgb +\
      \n                   in_specular[component] * specular;"
  else if (lightingComplexity == 3)

      \n  g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix[0] *\
      \n                   in_textureDatasetMatrix[0] * vec4(g_dataPos, 1.0);\
      \n  if (g_fragWorldPos.w != 0.0)\
      \n    g_fragWorldPos /= g_fragWorldPos.w;\
      \n  vec3 viewDirection = normalize(-g_fragWorldPos.xyz);\
      \n  vec3 ambient = vec3(0,0,0);\
      \n  vec3 diffuse = vec3(0,0,0);\
      \n  vec3 specular = vec3(0,0,0);\
      \n  vec3 vertLightDirection;\
      \n  vec3 normal = normalize((in_textureToEye[0] * vec4(gradient.xyz, 0.0)).xyz);\

      \n  for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
      \n    float attenuation = 1.0;\
      \n    lightDir = in_lightDirection[lightNum];\
      \n    if (in_lightPositional[lightNum] == 0)\
      \n      vertLightDirection = lightDir;\

      \n      vertLightDirection = (g_fragWorldPos.xyz - in_lightPosition[lightNum]);\
      \n      float distance = length(vertLightDirection);\
      \n      vertLightDirection = normalize(vertLightDirection);\
      \n      attenuation = 1.0 /\
      \n                    (in_lightAttenuation[lightNum].x\
      \n                     + in_lightAttenuation[lightNum].y * distance\
      \n                     + in_lightAttenuation[lightNum].z * distance * distance);\
      \n      // per OpenGL standard cone angle is 90 or less for a spot light\
      \n      if (in_lightConeAngle[lightNum] <= 90.0)\
      \n        float coneDot = dot(vertLightDirection, lightDir);\
      \n        // if inside the cone\
      \n        if (coneDot >= cos(radians(in_lightConeAngle[lightNum])))\
      \n          attenuation = attenuation * pow(coneDot, in_lightExponent[lightNum]);\

      \n          attenuation = 0.0;\

      \n    // diffuse and specular lighting\
      \n    float nDotL = dot(normal, vertLightDirection);\
      \n    if (nDotL < 0.0 && in_twoSidedLighting)\

      \n    float df = max(0.0, attenuation * nDotL);\
      \n    diffuse += (df * in_lightDiffuseColor[lightNum]);\

      \n    vec3 h = normalize(vertLightDirection + viewDirection);\
      \n    float nDotH = dot(normal, h);\
      \n    if (nDotH < 0.0 && in_twoSidedLighting)\

      \n    float sf = attenuation * pow(nDotH, in_shininess[component]);\
      \n    specular += (sf * in_lightSpecularColor[lightNum]);\

      \n    ambient += in_lightAmbientColor[lightNum];\

      \n  finalColor.xyz = in_ambient[component] * ambient +\
      \n                   in_diffuse[component] * diffuse * color.rgb +\
      \n                   in_specular[component] * specular;\
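For positional lights the attenuation above is the usual constant/linear/quadratic model, optionally scaled by the spot exponent inside the cone. A small worked example with made-up coefficients (not values from this file):

  // Illustration only: in_lightAttenuation = (1.0, 0.1, 0.01), distance = 5.
  constexpr float kc = 1.0f, kl = 0.1f, kq = 0.01f, d = 5.0f;
  constexpr float attenuation = 1.0f / (kc + kl * d + kq * d * d); // 1 / 1.75 ~= 0.571
  // For a 30 degree cone with coneDot = 0.95 and exponent 2.0, the shader would
  // further scale this by pow(0.95, 2.0) ~= 0.9025, since 0.95 >= cos(30 deg).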
817 "\n finalColor = vec4(color.rgb, 0.0);"
827 glMapper->GetInputCount() == 1)
830 (noOfComponents == 1 || !independentComponents))
833 \n if (gradient.w >= 0.0)\
835 \n color.a = color.a *\
836 \n computeGradientOpacity(gradient);\
840 else if (noOfComponents > 1 && independentComponents &&
844 \n if (gradient.w >= 0.0)\
846 \n for (int i = 0; i < in_noOfComponents; ++i)\
848 \n color.a = color.a *\
849 \n computeGradientOpacity(gradient, i) * in_componentWeight[i];\
857 \n finalColor.a = color.a;\
858 \n return finalColor;\
  int vtkNotUsed(noOfComponents))

      \nvec3 computeRayDirection()\
      \n  return normalize(ip_vertexPos.xyz - g_eyePosObj.xyz);\

      \nuniform vec3 in_projectionDirection;\
      \nvec3 computeRayDirection()\
      \n  return normalize((in_inverseVolumeMatrix[0] *\
      \n                    vec4(in_projectionDirection, 0.0)).xyz);\
  int independentComponents,
  std::map<int, std::string> colorTableMap)

  std::ostringstream ss;
  ss << "uniform sampler2D " << ArrayBaseName(colorTableMap[0])
     << "[" << noOfComponents << "];\n";

  if (noOfComponents == 1)

      \nvec4 computeColor(vec4 scalar, float opacity)\
      \n  return computeLighting(vec4(texture2D(" + colorTableMap[0] + ",\
      \n  vec2(scalar.w, 0.0)).xyz, opacity), 0);\

  else if (noOfComponents > 1 && independentComponents)

    std::ostringstream toString;

      \nvec4 computeColor(vec4 scalar, float opacity, int component)\

    for (int i = 0; i < noOfComponents; ++i)

        \n  if (component == " + toString.str() + ")");

        \n    return computeLighting(vec4(texture2D(\
        \n    " + colorTableMap[i]);

        \n    scalar[" + toString.str() + "],0.0)).xyz,\
        \n    opacity)," + toString.str() + ");\

  else if (noOfComponents == 2 && !independentComponents)

      \nvec4 computeColor(vec4 scalar, float opacity)\
      \n  return computeLighting(vec4(texture2D(" + colorTableMap[0] + ",\
      \n  vec2(scalar.x, 0.0)).xyz,\

      \nvec4 computeColor(vec4 scalar, float opacity)\
      \n  return computeLighting(vec4(scalar.xyz, opacity), 0);\
  std::ostringstream ss;

  for (auto& item : inputs)

    auto prop = item.second.Volume->GetProperty();

    auto& map = item.second.RGBTablesMap;
    const auto numComp = map.size();
    ss << "uniform sampler2D " << ArrayBaseName(map[0])
       << "[" << numComp << "];\n";

  ss <<
    "vec3 computeColor(const in float scalar, const in sampler2D colorTF)\n"
    "  return texture2D(colorTF, vec2(scalar, 0)).rgb;\n"
  std::ostringstream ss;

  for (auto& item : inputs)

    auto prop = item.second.Volume->GetProperty();

    auto& map = item.second.OpacityTablesMap;
    const auto numComp = map.size();
    ss << "uniform sampler2D " << ArrayBaseName(map[0])
       << "[" << numComp << "];\n";

  ss <<
    "float computeOpacity(const in float scalar, const in sampler2D opacityTF)\n"
    "  return texture2D(opacityTF, vec2(scalar, 0)).r;\n"
  std::ostringstream ss;

  for (auto& item : inputs)

    auto prop = item.second.Volume->GetProperty();

        !prop->HasGradientOpacity())

    auto& map = item.second.GradientOpacityTablesMap;
    const auto numComp = map.size();
    ss << "uniform sampler2D " << ArrayBaseName(map[0])
       << "[" << numComp << "];\n";

  ss <<
    "float computeGradientOpacity(const in float scalar, const in sampler2D opacityTF)\n"
    "  return texture2D(opacityTF, vec2(scalar, 0)).r;\n"
  int independentComponents,
  std::map<int, std::string> opacityTableMap)

  std::ostringstream ss;
  ss << "uniform sampler2D " << ArrayBaseName(opacityTableMap[0])
     << "[" << noOfComponents << "];\n";

  if (noOfComponents > 1 && independentComponents)

      \nfloat computeOpacity(vec4 scalar, int component)\

    for (int i = 0; i < noOfComponents; ++i)

      std::ostringstream toString;

        \n  if (component == " + toString.str() + ")");

        \n    return texture2D(" + opacityTableMap[i]);

      shaderStr += std::string(",vec2(scalar[" + toString.str() + "], 0)).r;\

  else if (noOfComponents == 2 && !independentComponents)

      \nfloat computeOpacity(vec4 scalar)\
      \n  return texture2D(" + opacityTableMap[0] + ", vec2(scalar.y, 0)).r;\

      \nfloat computeOpacity(vec4 scalar)\
      \n  return texture2D(" + opacityTableMap[0] + ", vec2(scalar.w, 0)).r;\
  int independentComponents,
  std::map<int, std::string> colorTableMap)

  if (noOfComponents == 1)

      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  vec4 color = texture2D(" + colorTableMap[0] + ",\n"
      "    vec2(scalar.w, g_gradients_0[0].w));\n"
      "  return computeLighting(color, 0);\n"

  else if (noOfComponents > 1 && independentComponents)

      "vec4 computeColor(vec4 scalar, float opacity, int component)\n"

    for (int i = 0; i < noOfComponents; ++i)

      std::ostringstream toString;

        "  if (component == " + num + ")\n"
        "    vec4 color = texture2D(" + colorTableMap[i] + ",\n"
        "      vec2(scalar[" + num + "], g_gradients_0[" + num + "].w));\n"
        "    return computeLighting(color, " + num + ");\n"

  else if (noOfComponents == 2 && !independentComponents)

      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  vec4 color = texture2D(" + colorTableMap[0] + ",\n"
      "    vec2(scalar.x, g_gradients_0[0].w));\n"
      "  return computeLighting(color, 0);\n"

      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  return computeLighting(vec4(scalar.xyz, opacity), 0);\n"
  std::ostringstream ss;

  for (auto& item : inputs)

    auto prop = item.second.Volume->GetProperty();

    auto& map = item.second.TransferFunctions2DMap;
    const auto numComp = map.size();
    ss << "uniform sampler2D " << ArrayBaseName(map[0])
       << "[" << numComp << "];\n";
  int independentComponents,
  std::map<int, std::string> opacityTableMap)

  std::ostringstream toString;
  if (noOfComponents > 1 && independentComponents)

      "float computeOpacity(vec4 scalar, int component)\n"

    for (int i = 0; i < noOfComponents; ++i)

        "  if (component == " << i << ")\n"
        "    return texture2D(" << opacityTableMap[i] << ",\n"
        "      vec2(scalar[" << i << "], g_gradients_0[" << i << "].w)).a;\n"

  else if (noOfComponents == 2 && !independentComponents)

      "float computeOpacity(vec4 scalar)\n"
      "  return texture2D(" + opacityTableMap[0] + ",\n"
      "    vec2(scalar.y, g_gradients_0[0].w)).a;\n"

      "float computeOpacity(vec4 scalar)\n"
      "  return texture2D(" + opacityTableMap[0] + ",\n"
      "    vec2(scalar.a, g_gradients_0[0].w)).a;\n"

  return toString.str();
      \n  bool l_firstValue;\
      \n  vec4 l_maxValue;");

      \n  bool l_firstValue;\
      \n  vec4 l_minValue;");

      \n  uvec4 l_numSamples;\
      \n  vec4 l_avgValue;");

      \n  vec4 l_sumValue;");

      \n  int l_initialIndex = 0;\
      \n  float l_normValues[NUMBER_OF_CONTOURS + 2];");

      \n  // We get data between 0.0 - 1.0 range\
      \n  l_firstValue = true;\
      \n  l_maxValue = vec4(0.0);"

      \n  // We get data between 0.0 - 1.0 range\
      \n  l_firstValue = true;\
      \n  l_minValue = vec4(1.0);"

      \n  // We get data between 0.0 - 1.0 range\
      \n  l_avgValue = vec4(0.0);\
      \n  // Keep track of number of samples\
      \n  l_numSamples = uvec4(0);"

      \n  // We get data between 0.0 - 1.0 range\
      \n  l_sumValue = vec4(0.0);"

      \n#if NUMBER_OF_CONTOURS\
      \n  l_normValues[0] = -1e20; //-infinity\
      \n  l_normValues[NUMBER_OF_CONTOURS+1] = +1e20; //+infinity\
      \n  for (int i = 0; i < NUMBER_OF_CONTOURS; i++)\
      \n    l_normValues[i+1] = (in_isosurfacesValues[i] - in_scalarsRange[0].x) /\
      \n                        (in_scalarsRange[0].y - in_scalarsRange[0].x);\
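The loop above maps each user-supplied isovalue into the normalized [0, 1] scalar space the ray caster samples in. A small worked example with made-up numbers:

  // Illustration only: scalar range [0, 255] and an isovalue of 100 gives
  //   (100 - 0) / (255 - 0) ~= 0.392,
  // stored in l_normValues[i+1]; the sentinels at indices 0 and
  // NUMBER_OF_CONTOURS+1 stay at -1e20 and +1e20.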
  int independentComponents = 0)

  const int numInputs = static_cast<int>(inputs.size());
  const int comp = numInputs == 1 ?
    (!independentComponents ? 1 : numInputs) :

  std::ostringstream toShader;
  for (const auto& item : inputs)

    auto& input = item.second;
    if (input.Volume->GetProperty()->HasGradientOpacity())
      toShader <<
        "vec4 " << input.GradientCacheName << "[" << comp << "];\n";

  return toShader.str();

  int noOfComponents = 1,
  int independentComponents = 0)

  std::ostringstream shader;
  if (independentComponents)

    if (noOfComponents == 1)
      shader <<
        "g_gradients_0[0] = computeGradient(g_dataPos, 0, in_volume[0], 0);\n";

        "for (int comp = 0; comp < in_noOfComponents; comp++)\n"
        "  g_gradients_0[comp] = computeGradient(g_dataPos, comp, in_volume[0], 0);\n"

      "g_gradients_0[0] = computeGradient(g_dataPos, 0, in_volume[0], 0);\n";

  return shader.str();
  std::ostringstream toShaderStr;

  for (auto& item : inputs)

    auto& input = item.second;
    auto property = input.Volume->GetProperty();

    const auto idx = i + 1;

    toShaderStr <<
      "      texPos = (in_cellToPoint[" << idx << "] * in_inverseTextureDatasetMatrix[" << idx
      << "] * in_inverseVolumeMatrix[" << idx << "] *\n"
      "        in_volumeMatrix[0] * in_textureDatasetMatrix[0] * vec4(g_dataPos.xyz, 1.0)).xyz;\n"
      "      if ((all(lessThanEqual(texPos, vec3(1.0))) &&\n"
      "        all(greaterThanEqual(texPos, vec3(0.0)))))\n"
      "        vec4 scalar = texture3D(in_volume[" << i << "], texPos);\n"
      "        scalar = scalar * in_volume_scale[" << i << "] + in_volume_bias[" << i << "];\n"
      "        scalar = vec4(scalar.r);\n"
      "        g_srcColor = vec4(0.0);\n";

      toShaderStr <<
        "        g_srcColor.a = computeOpacity(scalar.r," << input.OpacityTablesMap[0] << ");\n"
        "        if (g_srcColor.a > 0.0)\n"
        "          g_srcColor.rgb = computeColor(scalar.r, " << input.RGBTablesMap[0] << ");\n";

      if (property->HasGradientOpacity())

        const auto& grad = input.GradientCacheName;
        toShaderStr <<
          "          " << grad << "[0] = computeGradient(texPos, 0, " << "in_volume[" << i << "], " << i << ");\n"
          "          if (" << grad << "[0].w >= 0.0)\n"
          "            g_srcColor.a *= computeGradientOpacity(" << grad << "[0].w, "
          << input.GradientOpacityTablesMap[0] << ");\n"

      const auto& grad = input.GradientCacheName;

        "          " << grad << "[0] = computeGradient(texPos, 0, " << "in_volume[" << i << "], " << i << ");\n"
        "          g_srcColor = texture2D(" << input.TransferFunctions2DMap[0]
        << ", vec2(scalar.r, " << input.GradientCacheName << "[0].w));\n"
        "          if (g_srcColor.a > 0.0)\n"

      "          g_srcColor.rgb *= g_srcColor.a;\n"
      "          g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;\n"

  return toShaderStr.str();
  int independentComponents = 0)

    \n  vec4 scalar = texture3D(in_volume[0], g_dataPos);"

  if (noOfComponents == 1)

      \n  scalar.r = scalar.r * in_volume_scale[0].r + in_volume_bias[0].r;\
      \n  scalar = vec4(scalar.r);"

      \n  scalar = scalar * in_volume_scale[0] + in_volume_bias[0];"

    if (noOfComponents > 1)

      if (!independentComponents)

          \n  if (l_maxValue.w < scalar.w || l_firstValue)\
          \n    l_maxValue = scalar;\

          \n  if (l_firstValue)\
          \n    l_firstValue = false;\

          \n  for (int i = 0; i < in_noOfComponents; ++i)\
          \n    if (l_maxValue[i] < scalar[i] || l_firstValue)\
          \n      l_maxValue[i] = scalar[i];\

          \n  if (l_firstValue)\
          \n    l_firstValue = false;\

        \n  if (l_maxValue.w < scalar.x || l_firstValue)\
        \n    l_maxValue.w = scalar.x;\

        \n  if (l_firstValue)\
        \n    l_firstValue = false;\
    if (noOfComponents > 1)

      if (!independentComponents)

          \n  if (l_minValue.w > scalar.w || l_firstValue)\
          \n    l_minValue = scalar;\

          \n  if (l_firstValue)\
          \n    l_firstValue = false;\

          \n  for (int i = 0; i < in_noOfComponents; ++i)\
          \n    if (l_minValue[i] > scalar[i] || l_firstValue)\
          \n      l_minValue[i] = scalar[i];\

          \n  if (l_firstValue)\
          \n    l_firstValue = false;\

        \n  if (l_minValue.w > scalar.x || l_firstValue)\
        \n    l_minValue.w = scalar.x;\

        \n  if (l_firstValue)\
        \n    l_firstValue = false;\
    if (noOfComponents > 1 && independentComponents)

        \n  for (int i = 0; i < in_noOfComponents; ++i)\
        \n    // Get the intensity in volume scalar range\
        \n    float intensity = in_scalarsRange[i][0] +\
        \n                      (in_scalarsRange[i][1] -\
        \n                       in_scalarsRange[i][0]) * scalar[i];\
        \n    if (in_averageIPRange.x <= intensity &&\
        \n        intensity <= in_averageIPRange.y)\
        \n      l_avgValue[i] += computeOpacity(scalar, i) * scalar[i];\
        \n      ++l_numSamples[i];\

        \n  // Get the intensity in volume scalar range\
        \n  float intensity = in_scalarsRange[0][0] +\
        \n                    (in_scalarsRange[0][1] -\
        \n                     in_scalarsRange[0][0]) * scalar.x;\
        \n  if (in_averageIPRange.x <= intensity &&\
        \n      intensity <= in_averageIPRange.y)\
        \n    l_avgValue.x += computeOpacity(scalar) * scalar.x;\
        \n    ++l_numSamples.x;\
    if (noOfComponents > 1 && independentComponents)

        \n  for (int i = 0; i < in_noOfComponents; ++i)\
        \n    float opacity = computeOpacity(scalar, i);\
        \n    l_sumValue[i] = l_sumValue[i] + opacity * scalar[i];\

        \n  float opacity = computeOpacity(scalar);\
        \n  l_sumValue.x = l_sumValue.x + opacity * scalar.x;"
        \n#if NUMBER_OF_CONTOURS\
        \n  int maxComp = 0;");

    if (noOfComponents > 1 && independentComponents)

          \n  for (int i = 1; i < in_noOfComponents; ++i)\
          \n    if (in_componentWeight[i] > in_componentWeight[maxComp])\

      compParamStr = ", maxComp";

        \n  if (g_currentT == 0)\
        \n    l_initialIndex = findIsoSurfaceIndex(scalar[maxComp], l_normValues);\

        \n  bool shade = false;\
        \n  l_initialIndex = clamp(l_initialIndex, 0, NUMBER_OF_CONTOURS);\
        \n  if (scalar[maxComp] < l_normValues[l_initialIndex])\
        \n    s = l_normValues[l_initialIndex];\
        \n    l_initialIndex--;\

        \n  if (scalar[maxComp] > l_normValues[l_initialIndex+1])\
        \n    s = l_normValues[l_initialIndex+1];\
        \n    l_initialIndex++;\

        \n  if (shade == true)\
        \n    vec4 vs = vec4(s);\
        \n    g_srcColor.a = computeOpacity(vs " + compParamStr + ");\
        \n    g_srcColor = computeColor(vs, g_srcColor.a " + compParamStr + ");\
        \n    g_srcColor.rgb *= g_srcColor.a;\
        \n    g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;\
    if (noOfComponents > 1 && independentComponents)

        \n  vec4 color[4]; vec4 tmp = vec4(0.0);\
        \n  float totalAlpha = 0.0;\
        \n  for (int i = 0; i < in_noOfComponents; ++i)\

      if (glMapper->GetUseDepthPass() && glMapper->GetCurrentPass() ==

          \n    // Data fetching from the red channel of volume texture\
          \n    float opacity = computeOpacity(scalar, i);\
          \n    if (opacity > 0.0)\
          \n      g_srcColor.a = opacity;\

      else if (!mask || !maskInput ||

          \n    // Data fetching from the red channel of volume texture\
          \n    color[i][3] = computeOpacity(scalar, i);\
          \n    color[i] = computeColor(scalar, color[i][3], i);\
          \n    totalAlpha += color[i][3] * in_componentWeight[i];\

          \n  if (totalAlpha > 0.0)\
          \n    for (int i = 0; i < in_noOfComponents; ++i)\
          \n      // Only let visible components contribute to the final color\
          \n      if (in_componentWeight[i] <= 0) continue;\

          \n      tmp.x += color[i].x * color[i].w * in_componentWeight[i];\
          \n      tmp.y += color[i].y * color[i].w * in_componentWeight[i];\
          \n      tmp.z += color[i].z * color[i].w * in_componentWeight[i];\
          \n      tmp.w += ((color[i].w * color[i].w)/totalAlpha);\

          \n  g_fragColor = (1.0f - g_fragColor.a) * tmp + g_fragColor;"
    else if (glMapper->GetUseDepthPass() && glMapper->GetCurrentPass() ==

        \n  g_srcColor = vec4(0.0);\
        \n  g_srcColor.a = computeOpacity(scalar);"

      if (!mask || !maskInput ||

          \n  g_srcColor = vec4(0.0);\
          \n  g_srcColor.a = computeOpacity(scalar);\
          \n  if (g_srcColor.a > 0.0)\
          \n    g_srcColor = computeColor(scalar, g_srcColor.a);"

        \n  // Opacity calculation using compositing:\
        \n  // Here we use a front-to-back compositing scheme whereby\
        \n  // the current sample colour is first premultiplied by its own alpha,\
        \n  // then weighted by the transparency accumulated so far\
        \n  // (1 - accumulated alpha) and added to the composited colour.\
        \n  // The composited alpha is accumulated in the same way, so samples\
        \n  // behind an already opaque accumulation contribute nothing.\
        \n  g_srcColor.rgb *= g_srcColor.a;\
        \n  g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;"
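The two lines above are the classic front-to-back "over" recurrence with premultiplied alpha. A minimal CPU-side sketch of the same update, assuming a simple RGBA accumulator (illustration only, not VTK code):

  // Illustration only: front-to-back compositing of one sample into dst.
  struct RGBA { float r, g, b, a; };
  inline void CompositeFrontToBack(RGBA& dst, RGBA src)
  {
    // Premultiply the sample colour by its alpha, then weight by the
    // transparency accumulated so far and add it to the running result.
    const float w = 1.0f - dst.a;
    dst.r += w * src.r * src.a;
    dst.g += w * src.g * src.a;
    dst.b += w * src.b * src.a;
    dst.a += w * src.a;
  }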
  if (!mask || !maskInput ||

      \n  // Special coloring mode which renders the Prop Id in fragments that\
      \n  // have accumulated a certain level of opacity. Used during the selection\
      \n  // pass vtkHardwareSelector::ACTOR_PASS.\
      \n  if (g_fragColor.a > 3.0 / 255.0)\
      \n    gl_FragData[0] = vec4(in_propId, 1.0);\

      \n    gl_FragData[0] = vec4(0.0);\

      \n  // Special coloring mode which renders the voxel index in fragments that\
      \n  // have accumulated a certain level of opacity. Used during the selection\
      \n  // pass vtkHardwareSelector::ID_LOW24.\
      \n  if (g_fragColor.a > 3.0 / 255.0)\
      \n    uvec3 volumeDim = uvec3(in_textureExtentsMax - in_textureExtentsMin);\
      \n    uvec3 voxelCoords = uvec3(volumeDim * g_dataPos);\
      \n    // vtkHardwareSelector assumes index 0 to be empty space, so add uint(1).\
      \n    uint idx = volumeDim.x * volumeDim.y * voxelCoords.z +\
      \n               volumeDim.x * voxelCoords.y + voxelCoords.x + uint(1);\
      \n    gl_FragData[0] = vec4(float(idx % uint(256)) / 255.0,\
      \n                          float((idx / uint(256)) % uint(256)) / 255.0,\
      \n                          float((idx / uint(65536)) % uint(256)) / 255.0, 1.0);\

      \n    gl_FragData[0] = vec4(0.0);\
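The three channel values written above are simply the low, middle and high bytes of the 1-based voxel index, mapped to [0, 1]. A small worked example with a made-up index:

  // Illustration only: idx = 70000 would be written out as
  //   red   = (70000 % 256)           / 255.0 = 112 / 255
  //   green = ((70000 / 256) % 256)   / 255.0 =  17 / 255
  //   blue  = ((70000 / 65536) % 256) / 255.0 =   1 / 255
  // which the selector reassembles into the original 24-bit index.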
      \n  // Special coloring mode which renders the voxel index in fragments that\
      \n  // have accumulated a certain level of opacity. Used during the selection\
      \n  // pass vtkHardwareSelector::ID_MID24.\
      \n  if (g_fragColor.a > 3.0 / 255.0)\
      \n    uvec3 volumeDim = uvec3(in_textureExtentsMax - in_textureExtentsMin);\
      \n    uvec3 voxelCoords = uvec3(volumeDim * g_dataPos);\
      \n    // vtkHardwareSelector assumes index 0 to be empty space, so add uint(1).\
      \n    uint idx = volumeDim.x * volumeDim.y * voxelCoords.z +\
      \n               volumeDim.x * voxelCoords.y + voxelCoords.x + uint(1);\
      \n    idx = ((idx & 0xff000000) >> 24);\
      \n    gl_FragData[0] = vec4(float(idx % uint(256)) / 255.0,\
      \n                          float((idx / uint(256)) % uint(256)) / 255.0,\
      \n                          float(idx / uint(65536)) / 255.0, 1.0);\

      \n    gl_FragData[0] = vec4(0.0);\
  int independentComponents = 0)

    if (noOfComponents > 1 && independentComponents)

        \n  g_srcColor = vec4(0);\
        \n  for (int i = 0; i < in_noOfComponents; ++i)\
        \n    vec4 tmp = computeColor(l_maxValue, computeOpacity(l_maxValue, i), i);\
        \n    g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[3] += tmp[3] * in_componentWeight[i];\
        \n  g_fragColor = g_srcColor;"

        \n  g_srcColor = computeColor(l_maxValue,\
        \n                            computeOpacity(l_maxValue));\
        \n  g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
        \n  g_fragColor.a = g_srcColor.a;"
    if (noOfComponents > 1 && independentComponents)

        \n  g_srcColor = vec4(0);\
        \n  for (int i = 0; i < in_noOfComponents; ++i)\
        \n    vec4 tmp = computeColor(l_minValue, computeOpacity(l_minValue, i), i);\
        \n    g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
        \n    g_srcColor[3] += tmp[3] * in_componentWeight[i];\
        \n  g_fragColor = g_srcColor;"

        \n  g_srcColor = computeColor(l_minValue,\
        \n                            computeOpacity(l_minValue));\
        \n  g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
        \n  g_fragColor.a = g_srcColor.a;"
    if (noOfComponents > 1 && independentComponents)

        \n  for (int i = 0; i < in_noOfComponents; ++i)\
        \n    if (l_numSamples[i] == uint(0))\

        \n    l_avgValue[i] = l_avgValue[i] * in_componentWeight[i] /\
        \n                    l_numSamples[i];\

        \n    l_avgValue[0] += l_avgValue[i];\

        \n  l_avgValue[0] = clamp(l_avgValue[0], 0.0, 1.0);\
        \n  g_fragColor = vec4(vec3(l_avgValue[0]), 1.0);"

        \n  if (l_numSamples.x == uint(0))\

        \n  l_avgValue.x /= l_numSamples.x;\
        \n  l_avgValue.x = clamp(l_avgValue.x, 0.0, 1.0);\
        \n  g_fragColor = vec4(vec3(l_avgValue.x), 1.0);\
    if (noOfComponents > 1 && independentComponents)

        \n  l_sumValue.x *= in_componentWeight.x;\
        \n  for (int i = 1; i < in_noOfComponents; ++i)\
        \n    l_sumValue.x += l_sumValue[i] * in_componentWeight[i];\

        \n  l_sumValue.x = clamp(l_sumValue.x, 0.0, 1.0);\
        \n  g_fragColor = vec4(vec3(l_sumValue.x), 1.0);"

        \n  l_sumValue.x = clamp(l_sumValue.x, 0.0, 1.0);\
        \n  g_fragColor = vec4(vec3(l_sumValue.x), 1.0);"
    \n  const float g_opacityThreshold = 1.0 - 1.0 / 255.0;");

    \n  uniform vec3 in_propId;");
    \n  // Flag to indicate if the raymarch loop should terminate\
    \n  bool stop = false;\

    \n  g_terminatePointMax = 0.0;\

    \n  vec4 l_depthValue = vec4(1.0,1.0,1.0,1.0);\

    \n  vec4 l_depthValue = texture2D(in_depthSampler, fragTexCoord);\

    \n  if(gl_FragCoord.z >= l_depthValue.x)\

    \n  // color buffer or max scalar buffer have a reduced size.\
    \n  fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
    \n                 in_inverseOriginalWindowSize;\

    \n  // Compute the max number of iterations it will take before we hit\
    \n  // the termination point\

    \n  // Abscissa of the point on the depth buffer along the ray.\
    \n  // point in texture coordinates\
    \n  vec4 rayTermination = WindowToNDC(gl_FragCoord.x, gl_FragCoord.y, l_depthValue.x);\

    \n  // From normalized device coordinates to eye coordinates.\
    \n  // in_projectionMatrix is inversed because of way VT\
    \n  // From eye coordinates to texture coordinates\
    \n  rayTermination = ip_inverseTextureDataAdjusted *\
    \n                   in_inverseVolumeMatrix[0] *\
    \n                   in_inverseModelViewMatrix *\
    \n                   in_inverseProjectionMatrix *\
    \n                   rayTermination;\
    \n  g_rayTermination = rayTermination.xyz / rayTermination.w;\

    \n  // Setup the current segment:\
    \n  g_dataPos = g_rayOrigin;\
    \n  g_terminatePos = g_rayTermination;\

    \n  g_terminatePointMax = length(g_terminatePos.xyz - g_dataPos.xyz) /\
    \n                        length(g_dirStep);\
    \n  g_currentT = 0.0;");
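g_terminatePointMax is simply the length of the ray segment measured in step units. A small worked example with made-up numbers:

  // Illustration only: if the segment from g_dataPos to g_terminatePos is
  // 0.6 texture units long and |g_dirStep| = 0.004, the march may run up to
  //   g_terminatePointMax = 0.6 / 0.004 = 150 steps
  // before g_currentT reaches it and the ray is terminated.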
    \n  if(any(greaterThan(g_dataPos, in_texMax[0])) ||\
    \n     any(lessThan(g_dataPos, in_texMin[0])))\

    \n  // Early ray termination:\
    \n  // if the currently composited colour alpha is already fully saturated\
    \n  // we terminate the loop, or if we have hit an obstacle in the\
    \n  // direction of the ray (using the depth buffer) we terminate as well.\
    \n  if((g_fragColor.a > g_opacityThreshold) ||\
    \n     g_currentT >= g_terminatePointMax)\
    \nuniform float in_croppingPlanes[6];\
    \nuniform int in_croppingFlags [32];\
    \nfloat croppingPlanesTexture[6];\

    \n// X: axis = 0, Y: axis = 1, Z: axis = 2\
    \n// cp Cropping plane bounds (minX, maxX, minY, maxY, minZ, maxZ)\
    \nint computeRegionCoord(float cp[6], vec3 pos, int axis)\
    \n  int cpmin = axis * 2;\
    \n  int cpmax = cpmin + 1;\

    \n  if (pos[axis] < cp[cpmin])\

    \n  else if (pos[axis] >= cp[cpmin] &&\
    \n           pos[axis] < cp[cpmax])\

    \n  else if (pos[axis] >= cp[cpmax])\

    \nint computeRegion(float cp[6], vec3 pos)\
    \n  return (computeRegionCoord(cp, pos, 0) +\
    \n         (computeRegionCoord(cp, pos, 1) - 1) * 3 +\
    \n         (computeRegionCoord(cp, pos, 2) - 1) * 9);\
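computeRegion packs the three per-axis results into a single base-3 index used to look up in_croppingFlags. A worked example, assuming (the per-axis return values are elided above, but the formula suggests it) computeRegionCoord returns 1, 2 or 3 for below / inside / above the cropping bounds:

  // Illustration only, assuming per-axis codes 1 (below), 2 (inside), 3 (above):
  // a position inside the cropping box on all three axes gives
  //   region = 2 + (2 - 1) * 3 + (2 - 1) * 9 = 14,
  // so in_croppingFlags[14] decides whether that sample is kept.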
    \n  // Convert cropping region to texture space\
    \n  mat4 datasetToTextureMat = in_inverseTextureDatasetMatrix[0];\

    \n  vec4 tempCrop = vec4(in_croppingPlanes[0], 0.0, 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[0] /= tempCrop[3];\
    \n  croppingPlanesTexture[0] = tempCrop[0];\

    \n  tempCrop = vec4(in_croppingPlanes[1], 0.0, 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[0] /= tempCrop[3];\
    \n  croppingPlanesTexture[1] = tempCrop[0];\

    \n  tempCrop = vec4(0.0, in_croppingPlanes[2], 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[1] /= tempCrop[3];\
    \n  croppingPlanesTexture[2] = tempCrop[1];\

    \n  tempCrop = vec4(0.0, in_croppingPlanes[3], 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[1] /= tempCrop[3];\
    \n  croppingPlanesTexture[3] = tempCrop[1];\

    \n  tempCrop = vec4(0.0, 0.0, in_croppingPlanes[4], 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[2] /= tempCrop[3];\
    \n  croppingPlanesTexture[4] = tempCrop[2];\

    \n  tempCrop = vec4(0.0, 0.0, in_croppingPlanes[5], 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[2] /= tempCrop[3];\
    \n  croppingPlanesTexture[5] = tempCrop[2];"
    \n  // Determine region\
    \n  int regionNo = computeRegion(croppingPlanesTexture, g_dataPos);\

    \n  // Check the cropping flags for this region to decide whether\
    \n  // it is OK to sample this voxel or not\
    \n  if (in_croppingFlags[regionNo] == 0)\
    \n    // Skip this voxel\
    \n  /// We support only 8 clipping planes for now\
    \n  /// The first value is the size of the data array for clipping\
    \n  /// planes (origin, normal)\
    \n  uniform float in_clippingPlanes[49];\
    \n  uniform float in_clippedVoxelIntensity;\

    \n  int clip_numPlanes;\
    \n  vec3 clip_rayDirObj;\
    \n  mat4 clip_texToObjMat;\
    \n  mat4 clip_objToTexMat;\

    \n// Tighten the sample range as needed to account for clip planes.\
    \n// Arguments are in texture coordinates.\
    \n// Returns true if the range is at all valid after clipping. If not,\
    \n// the fragment should be discarded.\
    \nbool AdjustSampleRangeForClipping(inout vec3 startPosTex, inout vec3 stopPosTex)\
    \n  vec4 startPosObj = vec4(0.0);\

    \n  startPosObj = clip_texToObjMat * vec4(startPosTex - g_rayJitter, 1.0);\
    \n  startPosObj = startPosObj / startPosObj.w;\
    \n  startPosObj.w = 1.0;\

    \n  vec4 stopPosObj = vec4(0.0);\

    \n  stopPosObj = clip_texToObjMat * vec4(stopPosTex, 1.0);\
    \n  stopPosObj = stopPosObj / stopPosObj.w;\
    \n  stopPosObj.w = 1.0;\

    \n  for (int i = 0; i < clip_numPlanes; i = i + 6)\
    \n    vec3 planeOrigin = vec3(in_clippingPlanes[i + 1],\
    \n                            in_clippingPlanes[i + 2],\
    \n                            in_clippingPlanes[i + 3]);\
    \n    vec3 planeNormal = normalize(vec3(in_clippingPlanes[i + 4],\
    \n                                      in_clippingPlanes[i + 5],\
    \n                                      in_clippingPlanes[i + 6]));\

    \n    // Abort if the entire segment is clipped:\
    \n    // (We can do this before adjusting the term point, since it'll\
    \n    // only move further into the clipped area)\
    \n    float startDistance = dot(planeNormal, planeOrigin - startPosObj.xyz);\
    \n    float stopDistance = dot(planeNormal, planeOrigin - stopPosObj.xyz);\
    \n    bool startClipped = startDistance > 0.0;\
    \n    bool stopClipped = stopDistance > 0.0;\
    \n    if (startClipped && stopClipped)\

    \n    float rayDotNormal = dot(clip_rayDirObj, planeNormal);\
    \n    bool frontFace = rayDotNormal > 0;\

    \n    // Move the start position further from the eye if needed:\
    \n    if (frontFace && // Observing from the clipped side (plane's front face)\
    \n        startDistance > 0.0) // Ray-entry lies on the clipped side.\
    \n      // Scale the point-plane distance to the ray direction and update the\

    \n      float rayScaledDist = startDistance / rayDotNormal;\
    \n      startPosObj = vec4(startPosObj.xyz + rayScaledDist * clip_rayDirObj, 1.0);\
    \n      vec4 newStartPosTex = clip_objToTexMat * vec4(startPosObj.xyz, 1.0);\
    \n      newStartPosTex /= newStartPosTex.w;\
    \n      startPosTex = newStartPosTex.xyz;\
    \n      startPosTex += g_rayJitter;\

    \n    // Move the end position closer to the eye if needed:\
    \n    if (!frontFace && // Observing from the unclipped side (plane's back face)\
    \n        stopDistance > 0.0) // Ray-entry lies on the unclipped side.\
    \n      // Scale the point-plane distance to the ray direction and update the\
    \n      // termination point.\
    \n      float rayScaledDist = stopDistance / rayDotNormal;\
    \n      stopPosObj = vec4(stopPosObj.xyz + rayScaledDist * clip_rayDirObj, 1.0);\
    \n      vec4 newStopPosTex = clip_objToTexMat * vec4(stopPosObj.xyz, 1.0);\
    \n      newStopPosTex /= newStopPosTex.w;\
    \n      stopPosTex = newStopPosTex.xyz;\

    \n  if (any(greaterThan(startPosTex, in_texMax[0])) ||\
    \n      any(lessThan(startPosTex, in_texMin[0])))\
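The start/stop adjustment above is plain ray-plane arithmetic: the signed point-plane distance is divided by the cosine between the ray direction and the plane normal to get a distance along the ray. A small worked example with made-up numbers:

  // Illustration only: startDistance = 0.2 (the start point sits 0.2 units on
  // the clipped side) and rayDotNormal = 0.5 (ray at 60 degrees to the normal):
  //   rayScaledDist = 0.2 / 0.5 = 0.4
  // so the start point is pushed 0.4 units along clip_rayDirObj, landing
  // exactly on the plane before being converted back to texture coordinates.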
    \n  vec4 tempClip = in_volumeMatrix[0] * vec4(rayDir, 0.0);\
    \n  if (tempClip.w != 0.0)\
    \n    tempClip = tempClip/tempClip.w;\
    \n    tempClip.w = 1.0;\
    \n  clip_rayDirObj = normalize(tempClip.xyz);");

      clip_rayDirObj = normalize(in_projectionDirection);");

    \n  clip_numPlanes = int(in_clippingPlanes[0]);\
    \n  clip_texToObjMat = in_volumeMatrix[0] * in_textureDatasetMatrix[0];\
    \n  clip_objToTexMat = in_inverseTextureDatasetMatrix[0] * in_inverseVolumeMatrix[0];\

    \n  // Adjust for clipping.\
    \n  if (!AdjustSampleRangeForClipping(g_rayOrigin, g_rayTermination))\
    \n  { // entire ray is clipped.\

    \n  // Update the segment post-clip:\
    \n  g_dataPos = g_rayOrigin;\
    \n  g_terminatePos = g_rayTermination;\
    \n  g_terminatePointMax = length(g_terminatePos.xyz - g_dataPos.xyz) /\
    \n                        length(g_dirStep);\
  int vtkNotUsed(maskType))

  if (!mask || !maskInput)

  if (!mask || !maskInput ||

    \nvec4 maskValue = texture3D(in_mask, g_dataPos);\
    \nif(maskValue.r <= 0.0)\

  if (!mask || !maskInput ||

    \nuniform float in_maskBlendFactor;\
    \nuniform sampler2D in_mask1;\
    \nuniform sampler2D in_mask2;"

  if (!mask || !maskInput ||

    \nvec4 scalar = texture3D(in_volume[0], g_dataPos);");

  if (noOfComponents == 1)

      \n  scalar.r = scalar.r * in_volume_scale[0].r + in_volume_bias[0].r;\
      \n  scalar = vec4(scalar.r);"

      \n  scalar = scalar * in_volume_scale[0] + in_volume_bias[0];"

    \nif (in_maskBlendFactor == 0.0)\
    \n  g_srcColor = computeColor(scalar, computeOpacity(scalar));\

    \n  float opacity = computeOpacity(scalar);\
    \n  // Get the mask value at this same location\
    \n  vec4 maskValue = texture3D(in_mask, g_dataPos);\
    \n  if(maskValue.r == 0.0)\
    \n    g_srcColor = computeColor(scalar, opacity);\

    \n    if (maskValue.r == 1.0/255.0)\
    \n      g_srcColor = texture2D(in_mask1, vec2(scalar.w,0.0));\

    \n      // maskValue.r == 2.0/255.0\
    \n      g_srcColor = texture2D(in_mask2, vec2(scalar.w,0.0));\

    \n    g_srcColor.a = 1.0;\
    \n    if(in_maskBlendFactor < 1.0)\
    \n      g_srcColor = (1.0 - in_maskBlendFactor) *\
    \n                   computeColor(scalar, opacity) +\
    \n                   in_maskBlendFactor * g_srcColor;\

    \n  g_srcColor.a = opacity;\
2631 "uniform bool in_clampDepthToBackface;\n"
2632 "vec3 l_opaqueFragPos;\n"
2633 "bool l_updateDepth;\n");
2642 \n l_opaqueFragPos = vec3(-1.0);\
2643 \n if(in_clampDepthToBackface)\
2645 \n l_opaqueFragPos = g_dataPos;\
2647 \n l_updateDepth = true;"
2657 \n if(!g_skip && g_srcColor.a > 0.0 && l_updateDepth)\
2659 \n l_opaqueFragPos = g_dataPos;\
2660 \n l_updateDepth = false;\
2671 \n if (l_opaqueFragPos == vec3(-1.0))\
2673 \n gl_FragData[1] = vec4(1.0);\
2677 \n vec4 depthValue = in_projectionMatrix * in_modelViewMatrix *\
2678 \n in_volumeMatrix[0] * in_textureDatasetMatrix[0] *\
2679 \n vec4(l_opaqueFragPos, 1.0);\
2680 \n depthValue /= depthValue.w;\
2681 \n gl_FragData[1] = vec4(vec3(0.5 * (gl_DepthRange.far -\
2682 \n gl_DepthRange.near) * depthValue.z + 0.5 *\
2683 \n (gl_DepthRange.far + gl_DepthRange.near)), 1.0);\
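The gl_FragData[1] expression is the standard viewport depth-range mapping of the NDC z value. A small worked example with the default depth range:

  // Illustration only: with gl_DepthRange = [near = 0.0, far = 1.0] the
  // expression reduces to 0.5 * depthValue.z + 0.5, i.e. NDC z in [-1, 1]
  // maps to a stored depth in [0, 1]; depthValue.z = 0.2 gives 0.6.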
    \n  vec3 l_isoPos = g_dataPos;"

    \n  if(!g_skip && g_srcColor.a > 0.0)\
    \n    l_isoPos = g_dataPos;\
    \n    g_exit = true; g_skip = true;\

    \n  vec4 depthValue = in_projectionMatrix * in_modelViewMatrix *\
    \n                    in_volumeMatrix[0] * in_textureDatasetMatrix[0] *\
    \n                    vec4(l_isoPos, 1.0);\
    \n  gl_FragData[0] = vec4(l_isoPos, 1.0);\
    \n  gl_FragData[1] = vec4(vec3((depthValue.z/depthValue.w) * 0.5 + 0.5),\

    \n  initializeRayCast();\
    \n  castRay(-1.0, -1.0);\
    \n  finalizeRayCast();");
  const size_t usedNames)

  for (size_t i = 0; i < usedNames; i++)

    shader += "uniform sampler2D " + varNames[i] + ";\n";

  const size_t usedNames)

  for (size_t i = 0; i < usedNames; i++)

    std::stringstream ss;
    ss << i;
    shader += "  gl_FragData[" + ss.str() + "] = texture2D(" + varNames[i] +

  shader += "  return;\n";
#endif // vtkVolumeShaderComposer_h