#ifndef vtkVolumeShaderComposer_h
#define vtkVolumeShaderComposer_h

    \n  vec4 pos = in_projectionMatrix * in_modelViewMatrix *\
    \n             in_volumeMatrix * vec4(in_vertexPos.xyz, 1.0);\
    \n  gl_Position = pos;"

    "\n  // For point dataset, we offset the texture coordinate\
    \n  // to account for OpenGL treating voxel at the center of the cell.\
    \n  vec3 uvx = sign(in_cellSpacing) * (in_vertexPos - in_volumeExtentsMin) /\
    \n             (in_volumeExtentsMax - in_volumeExtentsMin);\
    \n  ip_textureCoords = uvx;\
    \n  ip_inverseTextureDataAdjusted = in_inverseTextureDatasetMatrix;\
    \n  // Transform cell tex-coordinates to point tex-coordinates\
    \n  ip_textureCoords = (in_cellToPoint * vec4(uvx, 1.0)).xyz;\
    \n  ip_inverseTextureDataAdjusted = in_cellToPoint * in_inverseTextureDatasetMatrix;\

    \n  uniform bool in_cellFlag;\
    \n  uniform vec3 in_cellSpacing;\
    \n  uniform mat4 in_modelViewMatrix;\
    \n  uniform mat4 in_projectionMatrix;\
    \n  uniform mat4 in_volumeMatrix;\
    \n  uniform vec3 in_volumeExtentsMin;\
    \n  uniform vec3 in_volumeExtentsMax;\
    \n  uniform mat4 in_inverseTextureDatasetMatrix;\
    \n  uniform mat4 in_cellToPoint;\
    \n  uniform vec3 in_textureExtentsMax;\
    \n  uniform vec3 in_textureExtentsMin;\
    \n  // This variable could be 'invariant varying' but it is declared\
    \n  // as 'varying' to avoid compiler compatibility issues.\
    \n  varying mat4 ip_inverseTextureDataAdjusted;");
  int vtkNotUsed(numberOfLights),
  int lightingComplexity,
  bool hasGradientOpacity,
  int noOfComponents,
  int independentComponents)
    \nuniform sampler3D in_volume;\
    \nuniform int in_noOfComponents;\
    \nuniform int in_independentComponents;\
    \nuniform sampler2D in_noiseSampler;\
    \nuniform sampler2D in_depthSampler;\
    \n// Camera position\
    \nuniform vec3 in_cameraPos;\
    \n// view and model matrices\
    \nuniform mat4 in_volumeMatrix;\
    \nuniform mat4 in_inverseVolumeMatrix;\
    \nuniform mat4 in_projectionMatrix;\
    \nuniform mat4 in_inverseProjectionMatrix;\
    \nuniform mat4 in_modelViewMatrix;\
    \nuniform mat4 in_inverseModelViewMatrix;\
    \nuniform mat4 in_textureDatasetMatrix;\
    \nuniform mat4 in_inverseTextureDatasetMatrix;\
    \nvarying mat4 ip_inverseTextureDataAdjusted;\
    \nuniform vec3 in_texMin;\
    \nuniform vec3 in_texMax;\
    \nuniform mat4 in_textureToEye;\
    \nuniform vec3 in_cellStep;\
    \nuniform vec2 in_scalarsRange[4];\
    \nuniform vec3 in_cellSpacing;\
    \n// Sample distance\
    \nuniform float in_sampleDistance;\
    \nuniform vec3 in_cellScale;\
    \nuniform vec2 in_windowLowerLeftCorner;\
    \nuniform vec2 in_inverseOriginalWindowSize;\
    \nuniform vec2 in_inverseWindowSize;\
    \nuniform vec3 in_textureExtentsMax;\
    \nuniform vec3 in_textureExtentsMin;\
    \n// Material and lighting\
    \nuniform vec3 in_diffuse[4];\
    \nuniform vec3 in_ambient[4];\
    \nuniform vec3 in_specular[4];\
    \nuniform float in_shininess[4];\
    \nuniform bool in_cellFlag;\
    \nuniform bool in_useJittering;\
    \nvec3 g_rayJitter = vec3(0.0);\
    \nuniform bool in_clampDepthToBackface;\
    \nuniform vec2 in_averageIPRange;"

  if (lightingComplexity > 0 || hasGradientOpacity)
    \nuniform bool in_twoSidedLighting;\

  if (hasGradientOpacity)

    \nvec3 g_cellSpacing;\
    \nfloat g_avgSpacing;");

  if (lightingComplexity == 3)

    \nvec4 g_fragWorldPos;\
    \nuniform int in_numberOfLights;\
    \nuniform vec3 in_lightAmbientColor[6];\
    \nuniform vec3 in_lightDiffuseColor[6];\
    \nuniform vec3 in_lightSpecularColor[6];\
    \nuniform vec3 in_lightDirection[6];\
    \nuniform vec3 in_lightPosition[6];\
    \nuniform vec3 in_lightAttenuation[6];\
    \nuniform float in_lightConeAngle[6];\
    \nuniform float in_lightExponent[6];\
    \nuniform int in_lightPositional[6];\

  else if (lightingComplexity == 2)

    \nvec4 g_fragWorldPos;\
    \nuniform int in_numberOfLights;\
    \nuniform vec3 in_lightAmbientColor[6];\
    \nuniform vec3 in_lightDiffuseColor[6];\
    \nuniform vec3 in_lightSpecularColor[6];\
    \nuniform vec3 in_lightDirection[6];\

    \nuniform vec3 in_lightAmbientColor[1];\
    \nuniform vec3 in_lightDiffuseColor[1];\
    \nuniform vec3 in_lightSpecularColor[1];\
    \nvec4 g_lightPosObj;\

  if (noOfComponents > 1 && independentComponents)

    \nuniform vec4 in_componentWeight;");

    \nuniform sampler2D in_depthPassSampler;");
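  // When the mapper's depth pass is in use, the extra in_depthPassSampler
  // declared above holds the depth rendered by that first pass; the ray-cast
  // initialization below reads it back to reconstruct the ray entry position
  // in texture space.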
  int lightingComplexity)
    \n  bool l_adjustTextureExtents = !in_cellFlag;"

    \n  vec2 fragTexCoord2 = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
    \n                        in_inverseWindowSize;\
    \n  vec4 depthValue = texture2D(in_depthPassSampler, fragTexCoord2);\
    \n  vec4 dataPos = WindowToNDC(gl_FragCoord.x, gl_FragCoord.y, depthValue.x);\
    \n  // From normalized device coordinates to eye coordinates.\
    \n  // in_projectionMatrix is inversed because of way VT\
    \n  // From eye coordinates to texture coordinates\
    \n  dataPos = in_inverseTextureDatasetMatrix *\
    \n            in_inverseVolumeMatrix *\
    \n            in_inverseModelViewMatrix *\
    \n            in_inverseProjectionMatrix *\
    \n  dataPos /= dataPos.w;\
    \n  g_dataPos = dataPos.xyz;\
    \n  l_adjustTextureExtents = true;"

    \n  // Get the 3D texture coordinates for lookup into the in_volume dataset\
    \n  g_dataPos = ip_textureCoords.xyz;"

    \n  // Eye position in dataset space\
    \n  g_eyePosObj = (in_inverseVolumeMatrix * vec4(in_cameraPos, 1.0));\
    \n  if (g_eyePosObj.w != 0.0)\
    \n    g_eyePosObj.x /= g_eyePosObj.w;\
    \n    g_eyePosObj.y /= g_eyePosObj.w;\
    \n    g_eyePosObj.z /= g_eyePosObj.w;\
    \n    g_eyePosObj.w = 1.0;\
    \n  // Getting the ray marching direction (in dataset space);\
    \n  vec3 rayDir = computeRayDirection();\
    \n  // Multiply the raymarching direction with the step size to get the\
    \n  // sub-step size we need to take at each raymarching step\
    \n  g_dirStep = (ip_inverseTextureDataAdjusted *\
    \n               vec4(rayDir, 0.0)).xyz * in_sampleDistance;\
    \n  // 2D Texture fragment coordinates [0,1] from fragment coordinates.\
    \n  // The frame buffer texture has the size of the plain buffer but\
    \n  // we use a fraction of it. The texture coordinate is less than 1 if\
    \n  // the reduction factor is less than 1.\
    \n  // Device coordinates are between -1 and 1. We need texture\
    \n  // coordinates between 0 and 1. The in_noiseSampler and in_depthSampler\
    \n  // buffers have the original size buffer.\
    \n  vec2 fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
    \n                       in_inverseWindowSize;\
    \n  if (in_useJittering)\
    \n    float jitterValue = texture2D(in_noiseSampler, fragTexCoord).x;\
    \n    g_rayJitter = g_dirStep * jitterValue;\
    \n    g_dataPos += g_rayJitter;\
    \n    g_dataPos += g_dirStep;\
    \n  // Flag to determine if voxel should be considered for the rendering\
    \n  g_skip = false;");
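  // In short, the initialization above brings the camera position into
  // dataset (texture) space, scales the normalized ray direction by
  // in_sampleDistance to obtain the per-step increment g_dirStep, and, when
  // in_useJittering is on, offsets the ray start by a value read from the
  // noise texture so that neighboring rays do not all sample the same
  // lattice planes.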
    \n  // Light position in dataset space\
    \n  g_lightPosObj = (in_inverseVolumeMatrix *\
    \n                   vec4(in_cameraPos, 1.0));\
    \n  if (g_lightPosObj.w != 0.0)\
    \n    g_lightPosObj.x /= g_lightPosObj.w;\
    \n    g_lightPosObj.y /= g_lightPosObj.w;\
    \n    g_lightPosObj.z /= g_lightPosObj.w;\
    \n    g_lightPosObj.w = 1.0;\
    \n  g_ldir = normalize(g_lightPosObj.xyz - ip_vertexPos);\
    \n  g_vdir = normalize(g_eyePosObj.xyz - ip_vertexPos);\
    \n  g_h = normalize(g_ldir + g_vdir);"

    \n  g_xvec = vec3(in_cellStep[0], 0.0, 0.0);\
    \n  g_yvec = vec3(0.0, in_cellStep[1], 0.0);\
    \n  g_zvec = vec3(0.0, 0.0, in_cellStep[2]);"

    \n  g_cellSpacing = vec3(in_cellSpacing[0],\
    \n                       in_cellSpacing[1],\
    \n                       in_cellSpacing[2]);\
    \n  g_avgSpacing = (g_cellSpacing[0] +\
    \n                  g_cellSpacing[1] +\
    \n                  g_cellSpacing[2])/3.0;\
    \n  // Adjust the aspect\
    \n  g_aspect.x = g_cellSpacing[0] * 2.0 / g_avgSpacing;\
    \n  g_aspect.y = g_cellSpacing[1] * 2.0 / g_avgSpacing;\
    \n  g_aspect.z = g_cellSpacing[2] * 2.0 / g_avgSpacing;"

  int noOfComponents,
  int independentComponents,
  std::map<int, std::string> gradientTableMap)
      (noOfComponents == 1 || !independentComponents))
    \nuniform sampler2D in_gradientTransferFunc;\
    \nfloat computeGradientOpacity(vec4 grad)\
    \n  return texture2D("+gradientTableMap[0]+", vec2(grad.w, 0.0)).r;\

  else if (noOfComponents > 1 && independentComponents &&
    std::ostringstream toString;
    for (int i = 0; i < noOfComponents; ++i)
      shaderStr += std::string("\n uniform sampler2D ") +
    \nfloat computeGradientOpacity(vec4 grad, int component)\

    for (int i = 0; i < noOfComponents; ++i)

    \n  if (component == " + toString.str() + ")");

    \n    return texture2D("+ gradientTableMap[i] + ", vec2(grad.w, 0.0)).r;\

    \n// c is short for component\
    \nvec4 computeGradient(int c)\
    \n  // Approximate Nabla(F) derivatives with central differences.\
    \n  vec3 g1; // F_front\
    \n  vec3 g2; // F_back\
    \n  g1.x = texture3D(in_volume, vec3(g_dataPos + g_xvec))[c];\
    \n  g1.y = texture3D(in_volume, vec3(g_dataPos + g_yvec))[c];\
    \n  g1.z = texture3D(in_volume, vec3(g_dataPos + g_zvec))[c];\
    \n  g2.x = texture3D(in_volume, vec3(g_dataPos - g_xvec))[c];\
    \n  g2.y = texture3D(in_volume, vec3(g_dataPos - g_yvec))[c];\
    \n  g2.z = texture3D(in_volume, vec3(g_dataPos - g_zvec))[c];\
    \n  // Apply scale and bias to the fetched values.\
    \n  g1 = g1 * in_volume_scale[c] + in_volume_bias[c];\
    \n  g2 = g2 * in_volume_scale[c] + in_volume_bias[c];\
    \n  // Central differences: (F_front - F_back) / 2h\
    \n  // This version of computeGradient() is only used for lighting\
    \n  // calculations (only direction matters), hence the difference is\
    \n  // not scaled by 2h and a dummy gradient mag is returned (-1.).\
    \n  return vec4((g1 - g2), -1.0);\

    \n// c is short for component\
    \nvec4 computeGradient(int c)\
    \n  // Approximate Nabla(F) derivatives with central differences.\
    \n  vec3 g1; // F_front\
    \n  vec3 g2; // F_back\
    \n  g1.x = texture3D(in_volume, vec3(g_dataPos + g_xvec))[c];\
    \n  g1.y = texture3D(in_volume, vec3(g_dataPos + g_yvec))[c];\
    \n  g1.z = texture3D(in_volume, vec3(g_dataPos + g_zvec))[c];\
    \n  g2.x = texture3D(in_volume, vec3(g_dataPos - g_xvec))[c];\
    \n  g2.y = texture3D(in_volume, vec3(g_dataPos - g_yvec))[c];\
    \n  g2.z = texture3D(in_volume, vec3(g_dataPos - g_zvec))[c];\
    \n  // Apply scale and bias to the fetched values.\
    \n  g1 = g1 * in_volume_scale[c] + in_volume_bias[c];\
    \n  g2 = g2 * in_volume_scale[c] + in_volume_bias[c];\
    \n  // Scale values to the actual scalar range.\
    \n  float range = in_scalarsRange[c][1] - in_scalarsRange[c][0];\
    \n  g1 = in_scalarsRange[c][0] + range * g1;\
    \n  g2 = in_scalarsRange[c][0] + range * g2;\
    \n  // Central differences: (F_front - F_back) / 2h\
    \n  float grad_mag = length(g2);\
    \n  // Handle normalizing with grad_mag == 0.0\
    \n  g2 = grad_mag > 0.0 ? normalize(g2) : vec3(0.0);\
    \n  // Since the actual range of the gradient magnitude is unknown,\
    \n  // assume it is in the range [0, 0.25 * dataRange].\
    \n  range = range != 0 ? range : 1.0;\
    \n  grad_mag = grad_mag / (0.25 * range);\
    \n  grad_mag = clamp(grad_mag, 0.0, 1.0);\
    \n  return vec4(g2.xyz, grad_mag);\

    \nvec4 computeGradient(int component)\
    \n  return vec4(0.0);\

  int independentComponents,
  int vtkNotUsed(numberOfLights),
  int lightingComplexity)
    \nvec4 computeLighting(vec4 color, int component)\

    \n  vec4 finalColor = vec4(0.0);"

  int const shadeReqd = volProperty->GetShade() &&

  switch (transferMode)
        "  // Compute gradient function only once\n"
        "  vec4 gradient = computeGradient(component);\n");

        "  // TransferFunction2D is enabled so the gradient for\n"
        "  // each component has already been cached\n");
      if (independentComponents && noOfComponents > 1)
          "  vec4 gradient = g_gradients[component];\n";

          "  vec4 gradient = g_gradients;\n";
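  // The lightingComplexity branches below all evaluate a Blinn-Phong model
  // from the cached gradient: ambient + diffuse (N.L) + specular (N.H)^shininess,
  // with negative dot products flipped when in_twoSidedLighting is enabled.
  // They differ only in where the light terms come from: a single headlight,
  // directional lights in eye space, or positional/spot lights with
  // attenuation and cone angles.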
  if (lightingComplexity == 1)
      \n  vec3 diffuse = vec3(0.0);\
      \n  vec3 specular = vec3(0.0);\
      \n  vec3 normal = gradient.xyz / in_cellSpacing;\
      \n  float normalLength = length(normal);\
      \n  if (normalLength > 0.0)\
      \n    normal = normalize(normal);\
      \n    normal = vec3(0.0, 0.0, 0.0);\
      \n  float nDotL = dot(normal, g_ldir);\
      \n  float nDotH = dot(normal, g_h);\
      \n  if (nDotL < 0.0 && in_twoSidedLighting)\
      \n  if (nDotH < 0.0 && in_twoSidedLighting)\
      \n  diffuse = nDotL * in_diffuse[component] *\
      \n            in_lightDiffuseColor[0] * color.rgb;\
      \n  specular = pow(nDotH, in_shininess[component]) *\
      \n             in_specular[component] *\
      \n             in_lightSpecularColor[0];\
      \n  // For the headlight, ignore the light's ambient color\
      \n  // for now as it is causing the old mapper tests to fail\
      \n  finalColor.xyz = in_ambient[component] * color.rgb +\
      \n                   diffuse + specular;"

  else if (lightingComplexity == 2)
      \n  g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
      \n                   in_textureDatasetMatrix * vec4(-g_dataPos, 1.0);\
      \n  if (g_fragWorldPos.w != 0.0)\
      \n    g_fragWorldPos /= g_fragWorldPos.w;\
      \n  vec3 vdir = normalize(g_fragWorldPos.xyz);\
      \n  vec3 normal = gradient.xyz;\
      \n  vec3 ambient = vec3(0.0);\
      \n  vec3 diffuse = vec3(0.0);\
      \n  vec3 specular = vec3(0.0);\
      \n  float normalLength = length(normal);\
      \n  if (normalLength > 0.0)\
      \n    normal = normalize((in_textureToEye * vec4(normal, 0.0)).xyz);\
      \n    normal = vec3(0.0, 0.0, 0.0);\
      \n  for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
      \n    vec3 ldir = in_lightDirection[lightNum].xyz;\
      \n    vec3 h = normalize(ldir + vdir);\
      \n    float nDotH = dot(normal, h);\
      \n    if (nDotH < 0.0 && in_twoSidedLighting)\
      \n    float nDotL = dot(normal, ldir);\
      \n    if (nDotL < 0.0 && in_twoSidedLighting)\
      \n    diffuse += in_lightDiffuseColor[lightNum] * nDotL;\
      \n    specular = in_lightSpecularColor[lightNum] *\
      \n               pow(nDotH, in_shininess[component]);\
      \n    ambient += in_lightAmbientColor[lightNum];\
      \n  finalColor.xyz = in_ambient[component] * ambient +\
      \n                   in_diffuse[component] * diffuse * color.rgb +\
      \n                   in_specular[component] * specular;"

  else if (lightingComplexity == 3)
      \n  g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
      \n                   in_textureDatasetMatrix * vec4(g_dataPos, 1.0);\
      \n  if (g_fragWorldPos.w != 0.0)\
      \n    g_fragWorldPos /= g_fragWorldPos.w;\
      \n  vec3 viewDirection = normalize(-g_fragWorldPos.xyz);\
      \n  vec3 ambient = vec3(0,0,0);\
      \n  vec3 diffuse = vec3(0,0,0);\
      \n  vec3 specular = vec3(0,0,0);\
      \n  vec3 vertLightDirection;\
      \n  vec3 normal = normalize((in_textureToEye * vec4(gradient.xyz, 0.0)).xyz);\
      \n  for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
      \n    float attenuation = 1.0;\
      \n    lightDir = in_lightDirection[lightNum];\
      \n    if (in_lightPositional[lightNum] == 0)\
      \n      vertLightDirection = lightDir;\
      \n      vertLightDirection = (g_fragWorldPos.xyz - in_lightPosition[lightNum]);\
      \n      float distance = length(vertLightDirection);\
      \n      vertLightDirection = normalize(vertLightDirection);\
      \n      attenuation = 1.0 /\
      \n                    (in_lightAttenuation[lightNum].x\
      \n                     + in_lightAttenuation[lightNum].y * distance\
      \n                     + in_lightAttenuation[lightNum].z * distance * distance);\
      \n      // per OpenGL standard cone angle is 90 or less for a spot light\
      \n      if (in_lightConeAngle[lightNum] <= 90.0)\
      \n        float coneDot = dot(vertLightDirection, lightDir);\
      \n        // if inside the cone\
      \n        if (coneDot >= cos(radians(in_lightConeAngle[lightNum])))\
      \n          attenuation = attenuation * pow(coneDot, in_lightExponent[lightNum]);\
      \n          attenuation = 0.0;\
      \n    // diffuse and specular lighting\
      \n    float nDotL = dot(normal, vertLightDirection);\
      \n    if (nDotL < 0.0 && in_twoSidedLighting)\
      \n    float df = max(0.0, attenuation * nDotL);\
      \n    diffuse += (df * in_lightDiffuseColor[lightNum]);\
      \n    vec3 h = normalize(vertLightDirection + viewDirection);\
      \n    float nDotH = dot(normal, h);\
      \n    if (nDotH < 0.0 && in_twoSidedLighting)\
      \n    float sf = attenuation * pow(nDotH, in_shininess[component]);\
      \n    specular += (sf * in_lightSpecularColor[lightNum]);\
      \n    ambient += in_lightAmbientColor[lightNum];\
      \n  finalColor.xyz = in_ambient[component] * ambient +\
      \n                   in_diffuse[component] * diffuse * color.rgb +\
      \n                   in_specular[component] * specular;\

      "\n  finalColor = vec4(color.rgb, 0.0);"

      (noOfComponents == 1 || !independentComponents))
      \n  if (gradient.w >= 0.0)\
      \n    color.a = color.a *\
      \n              computeGradientOpacity(gradient);\

  else if (noOfComponents > 1 && independentComponents &&

      \n  if (gradient.w >= 0.0)\
      \n    for (int i = 0; i < in_noOfComponents; ++i)\
      \n      color.a = color.a *\
      \n                computeGradientOpacity(gradient, i) * in_componentWeight[i];\

    \n  finalColor.a = color.a;\
    \n  return finalColor;\

  int vtkNotUsed(noOfComponents))
    \nvec3 computeRayDirection()\
    \n  return normalize(ip_vertexPos.xyz - g_eyePosObj.xyz);\

    \nuniform vec3 in_projectionDirection;\
    \nvec3 computeRayDirection()\
    \n  return normalize((in_inverseVolumeMatrix *\
    \n                    vec4(in_projectionDirection, 0.0)).xyz);\

  int independentComponents,
  std::map<int, std::string> colorTableMap)

  if (noOfComponents == 1)
    \nuniform sampler2D in_colorTransferFunc;\
    \nvec4 computeColor(vec4 scalar, float opacity)\
    \n  return computeLighting(vec4(texture2D(in_colorTransferFunc,\
    \n                         vec2(scalar.w, 0.0)).xyz, opacity), 0);\

  else if (noOfComponents > 1 && independentComponents)
    std::ostringstream toString;
    for (int i = 0; i < noOfComponents; ++i)
      shaderStr += std::string("\n uniform sampler2D ") +
    \nvec4 computeColor(vec4 scalar, float opacity, int component)\

    for (int i = 0; i < noOfComponents; ++i)

    \n  if (component == " + toString.str() + ")");

    \n    return computeLighting(vec4(texture2D(\
    \n      "+colorTableMap[i]);

    \n      scalar[" + toString.str() + "],0.0)).xyz,\
    \n      opacity),"+toString.str()+");\

  else if (noOfComponents == 2 && !independentComponents)
    \nuniform sampler2D in_colorTransferFunc;\
    \nvec4 computeColor(vec4 scalar, float opacity)\
    \n  return computeLighting(vec4(texture2D(in_colorTransferFunc,\
    \n                         vec2(scalar.x, 0.0)).xyz,\

    \nvec4 computeColor(vec4 scalar, float opacity)\
    \n  return computeLighting(vec4(scalar.xyz, opacity), 0);\

  int independentComponents,
  std::map<int, std::string> opacityTableMap)

  if (noOfComponents > 1 && independentComponents)

    std::ostringstream toString;
    for (int i = 0; i < noOfComponents; ++i)
      shaderStr += std::string("\n uniform sampler2D ") +
    \nfloat computeOpacity(vec4 scalar, int component)\

    for (int i = 0; i < noOfComponents; ++i)

    \n  if (component == " + toString.str() + ")");

    \n    return texture2D(" + opacityTableMap[i]);

      shaderStr += std::string(",vec2(scalar[" + toString.str() + "], 0)).r;\

  else if (noOfComponents == 2 && !independentComponents)
    \nuniform sampler2D " + opacityTableMap[0] + ";\
    \nfloat computeOpacity(vec4 scalar)\
    \n  return texture2D(" + opacityTableMap[0] + ", vec2(scalar.y, 0)).r;\

    \nuniform sampler2D " + opacityTableMap[0] + ";\
    \nfloat computeOpacity(vec4 scalar)\
    \n  return texture2D(" + opacityTableMap[0] + ", vec2(scalar.w, 0)).r;\

  int independentComponents,
  std::map<int, std::string> colorTableMap)

  if (noOfComponents == 1)
      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  vec4 color = texture2D(" + colorTableMap[0] + ",\n"
      "  vec2(scalar.w, g_gradients.w));\n"
      "  return computeLighting(color, 0);\n"

  else if (noOfComponents > 1 && independentComponents)
    std::ostringstream toString;

      "vec4 computeColor(vec4 scalar, float opacity, int component)\n"

    for (int i = 0; i < noOfComponents; ++i)

        "  if (component == " + num + ")\n"
        "    vec4 color = texture2D(" + colorTableMap[i] + ",\n"
        "    vec2(scalar[" + num + "], g_gradients[" + num + "].w));\n"
        "    return computeLighting(color, " + num + ");\n"

  else if (noOfComponents == 2 && !independentComponents)

      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  vec4 color = texture2D(" + colorTableMap[0] + ",\n"
      "  vec2(scalar.x, g_gradients.w));\n"
      "  return computeLighting(color, 0);\n"

      "vec4 computeColor(vec4 scalar, float opacity)\n"
      "  return computeLighting(vec4(scalar.xyz, opacity), 0);\n"

  int independentComponents,
  std::map<int, std::string> opacityTableMap)

  if (noOfComponents > 1 && independentComponents)

    std::ostringstream toString;
    for (int i = 0; i < noOfComponents; ++i)
      shaderStr += std::string("\nuniform sampler2D ") +
      "float computeOpacity(vec4 scalar, int component)\n"

    for (int i = 0; i < noOfComponents; ++i)

        "  if (component == " + num + ")\n"
        "    return texture2D(" + opacityTableMap[i] + ",\n"
        "    vec2(scalar[" + num + "], g_gradients[" + num + "].w)).a;\n"

  else if (noOfComponents == 2 && !independentComponents)

      "uniform sampler2D " + opacityTableMap[0] + ";\n"
      "float computeOpacity(vec4 scalar)\n"
      "  return texture2D(" + opacityTableMap[0] + ",\n"
      "  vec2(scalar.y, g_gradients.w)).a;\n"

      "uniform sampler2D " + opacityTableMap[0] + ";\n"
      "float computeOpacity(vec4 scalar)\n"
      "  return texture2D(" + opacityTableMap[0] + ",\n"
      "  vec2(scalar.a, g_gradients.w)).a;\n"

    \n  bool l_firstValue;\
    \n  vec4 l_maxValue;");
    \n  bool l_firstValue;\
    \n  vec4 l_minValue;");

    \n  uvec4 l_numSamples;\
    \n  vec4 l_avgValue;");

    \n  vec4 l_sumValue;");
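  // Each blend mode keeps its own per-ray accumulator: l_maxValue / l_minValue
  // for the intensity projections (with l_firstValue seeding the comparison),
  // l_avgValue together with l_numSamples for the average mode, and l_sumValue
  // for additive blending.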
    \n  // We get data between 0.0 - 1.0 range\
    \n  l_firstValue = true;\
    \n  l_maxValue = vec4(0.0);"

    \n  // We get data between 0.0 - 1.0 range\
    \n  l_firstValue = true;\
    \n  l_minValue = vec4(1.0);"

    \n  // We get data between 0.0 - 1.0 range\
    \n  l_avgValue = vec4(0.0);\
    \n  // Keep track of number of samples\
    \n  l_numSamples = uvec4(0);"

    \n  // We get data between 0.0 - 1.0 range\
    \n  l_sumValue = vec4(0.0);"

  int independentComponents = 0)
  if (independentComponents)
    if (noOfComponents == 1)

      std::ostringstream numss;
      numss << noOfComponents;

        "vec4 g_gradients[" + num + "];");
    if (noOfComponents == 2)
  int independentComponents = 0)

  if (independentComponents)
    if (noOfComponents == 1)

        "g_gradients = computeGradient(0);\n");

        "for (int comp = 0; comp < in_noOfComponents; comp++)\n"
        "  g_gradients[comp] = computeGradient(comp); \n"

    if (noOfComponents == 2)

        "g_gradients = computeGradient(0);\n");
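  // These snippets run once per ray-marching step, so every component's
  // gradient is evaluated a single time per sample and then reused by both
  // the 2D color and 2D opacity lookups above.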
  int independentComponents = 0)

    \n    vec4 scalar = texture3D(in_volume, g_dataPos);"

  if (noOfComponents == 1)

    \n    scalar.r = scalar.r*in_volume_scale.r + in_volume_bias.r;\
    \n    scalar = vec4(scalar.r,scalar.r,scalar.r,scalar.r);"

    \n    scalar = scalar*in_volume_scale + in_volume_bias;"

  if (noOfComponents > 1)
    if (!independentComponents)
      \n    if (l_maxValue.w < scalar.w || l_firstValue)\
      \n      l_maxValue = scalar;\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

      \n    for (int i = 0; i < in_noOfComponents; ++i)\
      \n      if (l_maxValue[i] < scalar[i] || l_firstValue)\
      \n        l_maxValue[i] = scalar[i];\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

      \n    if (l_maxValue.w < scalar.x || l_firstValue)\
      \n      l_maxValue.w = scalar.x;\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

  if (noOfComponents > 1)
    if (!independentComponents)

      \n    if (l_minValue.w > scalar.w || l_firstValue)\
      \n      l_minValue = scalar;\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

      \n    for (int i = 0; i < in_noOfComponents; ++i)\
      \n      if (l_minValue[i] > scalar[i] || l_firstValue)\
      \n        l_minValue[i] = scalar[i];\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

      \n    if (l_minValue.w > scalar.x || l_firstValue)\
      \n      l_minValue.w = scalar.x;\
      \n    if (l_firstValue)\
      \n      l_firstValue = false;\

  if (noOfComponents > 1 && independentComponents)
      \n  for (int i = 0; i < in_noOfComponents; ++i)\
      \n    // Get the intensity in volume scalar range\
      \n    float intensity = in_scalarsRange[i][0] +\
      \n                      (in_scalarsRange[i][1] -\
      \n                       in_scalarsRange[i][0]) * scalar[i];\
      \n    if (in_averageIPRange.x <= intensity &&\
      \n        intensity <= in_averageIPRange.y)\
      \n      l_avgValue[i] += computeOpacity(scalar, i) * scalar[i];\
      \n      ++l_numSamples[i];\

      \n  // Get the intensity in volume scalar range\
      \n  float intensity = in_scalarsRange[0][0] +\
      \n                    (in_scalarsRange[0][1] -\
      \n                     in_scalarsRange[0][0]) * scalar.x;\
      \n  if (in_averageIPRange.x <= intensity &&\
      \n      intensity <= in_averageIPRange.y)\
      \n    l_avgValue.x += computeOpacity(scalar) * scalar.x;\
      \n    ++l_numSamples.x;\

  if (noOfComponents > 1 && independentComponents)

      \n  for (int i = 0; i < in_noOfComponents; ++i)\
      \n    float opacity = computeOpacity(scalar, i);\
      \n    l_sumValue[i] = l_sumValue[i] + opacity * scalar[i];\

      \n  float opacity = computeOpacity(scalar);\
      \n  l_sumValue.x = l_sumValue.x + opacity * scalar.x;"

  if (noOfComponents > 1 && independentComponents)
      \n  vec4 color[4]; vec4 tmp = vec4(0.0);\
      \n  float totalAlpha = 0.0;\
      \n  for (int i = 0; i < in_noOfComponents; ++i)\

        \n    // Data fetching from the red channel of volume texture\
        \n    float opacity = computeOpacity(scalar, i);\
        \n    if (opacity > 0.0)\
        \n      g_srcColor.a = opacity;\

      else if (!mask || !maskInput ||

        \n    // Data fetching from the red channel of volume texture\
        \n    color[i][3] = computeOpacity(scalar, i);\
        \n    color[i] = computeColor(scalar, color[i][3], i);\
        \n    totalAlpha += color[i][3] * in_componentWeight[i];\
        \n  if (totalAlpha > 0.0)\
        \n    for (int i = 0; i < in_noOfComponents; ++i)\
        \n      // Only let visible components contribute to the final color\
        \n      if (in_componentWeight[i] <= 0) continue;\
        \n      tmp.x += color[i].x * color[i].w * in_componentWeight[i];\
        \n      tmp.y += color[i].y * color[i].w * in_componentWeight[i];\
        \n      tmp.z += color[i].z * color[i].w * in_componentWeight[i];\
        \n      tmp.w += ((color[i].w * color[i].w)/totalAlpha);\
        \n  g_fragColor = (1.0f - g_fragColor.a) * tmp + g_fragColor;"

        \n  g_srcColor = vec4(0.0);\
        \n  g_srcColor.a = computeOpacity(scalar);"

    if (!mask || !maskInput ||

        \n  g_srcColor = vec4(0.0);\
        \n  g_srcColor.a = computeOpacity(scalar);\
        \n  if (g_srcColor.a > 0.0)\
        \n    g_srcColor = computeColor(scalar, g_srcColor.a);"

      \n  // Opacity calculation using compositing:\
      \n  // Here we use front to back compositing scheme whereby\
      \n  // the current sample value is multiplied to the\
      \n  // currently accumulated alpha and then this product\
      \n  // is subtracted from the sample value to get the\
      \n  // alpha from the previous steps. Next, this alpha is\
      \n  // multiplied with the current sample colour\
      \n  // and accumulated to the composited colour. The alpha\
      \n  // value from the previous steps is then accumulated\
      \n  // to the composited colour alpha.\
      \n  g_srcColor.rgb *= g_srcColor.a;\
      \n  g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;"

    if (!mask || !maskInput ||
    \n  // Special coloring mode which renders the Prop Id in fragments that\
    \n  // have accumulated a certain level of opacity. Used during the selection\
    \n  // pass vtkHardwareSelector::ACTOR_PASS.\
    \n  if (g_fragColor.a > 3.0 / 255.0)\
    \n    gl_FragData[0] = vec4(in_propId, 1.0);\
    \n    gl_FragData[0] = vec4(0.0);\

    \n  // Special coloring mode which renders the voxel index in fragments that\
    \n  // have accumulated a certain level of opacity. Used during the selection\
    \n  // pass vtkHardwareSelector::ID_LOW24.\
    \n  if (g_fragColor.a > 3.0 / 255.0)\
    \n    uvec3 volumeDim = uvec3(in_textureExtentsMax - in_textureExtentsMin);\
    \n    uvec3 voxelCoords = uvec3(volumeDim * g_dataPos);\
    \n    // vtkHardwareSelector assumes index 0 to be empty space, so add uint(1).\
    \n    uint idx = volumeDim.x * volumeDim.y * voxelCoords.z +\
    \n               volumeDim.x * voxelCoords.y + voxelCoords.x + uint(1);\
    \n    gl_FragData[0] = vec4(float(idx % uint(256)) / 255.0,\
    \n                          float((idx / uint(256)) % uint(256)) / 255.0,\
    \n                          float((idx / uint(65536)) % uint(256)) / 255.0, 1.0);\
    \n    gl_FragData[0] = vec4(0.0);\

    \n  // Special coloring mode which renders the voxel index in fragments that\
    \n  // have accumulated a certain level of opacity. Used during the selection\
    \n  // pass vtkHardwareSelector::ID_MID24.\
    \n  if (g_fragColor.a > 3.0 / 255.0)\
    \n    uvec3 volumeDim = uvec3(in_textureExtentsMax - in_textureExtentsMin);\
    \n    uvec3 voxelCoords = uvec3(volumeDim * g_dataPos);\
    \n    // vtkHardwareSelector assumes index 0 to be empty space, so add uint(1).\
    \n    uint idx = volumeDim.x * volumeDim.y * voxelCoords.z +\
    \n               volumeDim.x * voxelCoords.y + voxelCoords.x + uint(1);\
    \n    idx = ((idx & 0xff000000) >> 24);\
    \n    gl_FragData[0] = vec4(float(idx % uint(256)) / 255.0,\
    \n                          float((idx / uint(256)) % uint(256)) / 255.0,\
    \n                          float(idx / uint(65536)) / 255.0, 1.0);\
    \n    gl_FragData[0] = vec4(0.0);\

  int independentComponents = 0)
  if (noOfComponents > 1 && independentComponents)

      \n  g_srcColor = vec4(0);\
      \n  for (int i = 0; i < in_noOfComponents; ++i)\
      \n    vec4 tmp = computeColor(l_maxValue, computeOpacity(l_maxValue, i), i);\
      \n    g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[3] += tmp[3] * in_componentWeight[i];\
      \n  g_fragColor = g_srcColor;"

      \n  g_srcColor = computeColor(l_maxValue,\
      \n                            computeOpacity(l_maxValue));\
      \n  g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
      \n  g_fragColor.a = g_srcColor.a;"

  if (noOfComponents > 1 && independentComponents)

      \n  g_srcColor = vec4(0);\
      \n  for (int i = 0; i < in_noOfComponents; ++i)\
      \n    vec4 tmp = computeColor(l_minValue, computeOpacity(l_minValue, i), i);\
      \n    g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
      \n    g_srcColor[3] += tmp[3] * in_componentWeight[i];\
      \n  g_fragColor = g_srcColor;"

      \n  g_srcColor = computeColor(l_minValue,\
      \n                            computeOpacity(l_minValue));\
      \n  g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
      \n  g_fragColor.a = g_srcColor.a;"

  if (noOfComponents > 1 && independentComponents)

      \n  for (int i = 0; i < in_noOfComponents; ++i)\
      \n    if (l_numSamples[i] == uint(0))\
      \n    l_avgValue[i] = l_avgValue[i] * in_componentWeight[i] /\
      \n                    l_numSamples[i];\
      \n    l_avgValue[0] += l_avgValue[i];\
      \n  l_avgValue[0] = clamp(l_avgValue[0], 0.0, 1.0);\
      \n  g_fragColor = vec4(vec3(l_avgValue[0]), 1.0);"

      \n  if (l_numSamples.x == uint(0))\
      \n  l_avgValue.x /= l_numSamples.x;\
      \n  l_avgValue.x = clamp(l_avgValue.x, 0.0, 1.0);\
      \n  g_fragColor = vec4(vec3(l_avgValue.x), 1.0);\

  if (noOfComponents > 1 && independentComponents)
      \n  l_sumValue.x *= in_componentWeight.x;\
      \n  for (int i = 1; i < in_noOfComponents; ++i)\
      \n    l_sumValue.x += l_sumValue[i] * in_componentWeight[i];\
      \n  l_sumValue.x = clamp(l_sumValue.x, 0.0, 1.0);\
      \n  g_fragColor = vec4(vec3(l_sumValue.x), 1.0);"

      \n  l_sumValue.x = clamp(l_sumValue.x, 0.0, 1.0);\
      \n  g_fragColor = vec4(vec3(l_sumValue.x), 1.0);"

    \n  const float g_opacityThreshold = 1.0 - 1.0 / 255.0;");
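  // With front-to-back compositing (dst.rgb += (1 - dst.a) * src.a * src.rgb,
  // dst.a += (1 - dst.a) * src.a), once the accumulated alpha exceeds
  // 1 - 1/255 no further sample can change the 8-bit result, so the marching
  // loop uses g_opacityThreshold to terminate the ray early.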
    \n  uniform vec3 in_propId;");
    \n  // Flag to indicate if the raymarch loop should terminate\
    \n  bool stop = false;\
    \n  g_terminatePointMax = 0.0;\

    \n  vec4 l_depthValue = vec4(1.0,1.0,1.0,1.0);\

    \n  vec4 l_depthValue = texture2D(in_depthSampler, fragTexCoord);\

    \n  if(gl_FragCoord.z >= l_depthValue.x)\

    \n  // color buffer or max scalar buffer have a reduced size.\
    \n  fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
    \n                 in_inverseOriginalWindowSize;\
    \n  // Compute max number of iterations it will take before we hit\
    \n  // the termination point\
    \n  // Abscissa of the point on the depth buffer along the ray.\
    \n  // point in texture coordinates\
    \n  vec4 terminatePoint = WindowToNDC(gl_FragCoord.x, gl_FragCoord.y, l_depthValue.x);\
    \n  // From normalized device coordinates to eye coordinates.\
    \n  // in_projectionMatrix is inversed because of way VT\
    \n  // From eye coordinates to texture coordinates\
    \n  terminatePoint = ip_inverseTextureDataAdjusted *\
    \n                   in_inverseVolumeMatrix *\
    \n                   in_inverseModelViewMatrix *\
    \n                   in_inverseProjectionMatrix *\
    \n  terminatePoint /= terminatePoint.w;\
    \n  g_terminatePointMax = length(terminatePoint.xyz - g_dataPos.xyz) /\
    \n                        length(g_dirStep);\
    \n  g_currentT = 0.0;");
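  // The allowed number of steps is therefore the texture-space distance from
  // the ray entry point to the depth-buffer hit divided by the step length;
  // g_currentT counts steps against g_terminatePointMax during the march.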
    \n    if(any(greaterThan(g_dataPos, in_texMax)) ||\
    \n       any(lessThan(g_dataPos, in_texMin)))\

    \n    // Early ray termination\
    \n    // if the currently composited colour alpha is already fully saturated\
    \n    // we terminate the loop or if we have hit an obstacle in the\
    \n    // direction of the ray (using depth buffer) we terminate as well.\
    \n    if((g_fragColor.a > g_opacityThreshold) ||\
    \n       g_currentT >= g_terminatePointMax)\

    \nuniform float in_croppingPlanes[6];\
    \nuniform int in_croppingFlags[32];\
    \nfloat croppingPlanesTexture[6];\
    \n// X: axis = 0, Y: axis = 1, Z: axis = 2\
    \n// cp Cropping plane bounds (minX, maxX, minY, maxY, minZ, maxZ)\
    \nint computeRegionCoord(float cp[6], vec3 pos, int axis)\
    \n  int cpmin = axis * 2;\
    \n  int cpmax = cpmin + 1;\
    \n  if (pos[axis] < cp[cpmin])\
    \n  else if (pos[axis] >= cp[cpmin] &&\
    \n           pos[axis] < cp[cpmax])\
    \n  else if (pos[axis] >= cp[cpmax])\
    \nint computeRegion(float cp[6], vec3 pos)\
    \n  return (computeRegionCoord(cp, pos, 0) +\
    \n          (computeRegionCoord(cp, pos, 1) - 1) * 3 +\
    \n          (computeRegionCoord(cp, pos, 2) - 1) * 9);\

    \n  // Convert cropping region to texture space\
    \n  mat4 datasetToTextureMat = in_inverseTextureDatasetMatrix;\
    \n  vec4 tempCrop = vec4(in_croppingPlanes[0], 0.0, 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[0] /= tempCrop[3];\
    \n  croppingPlanesTexture[0] = tempCrop[0];\
    \n  tempCrop = vec4(in_croppingPlanes[1], 0.0, 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[0] /= tempCrop[3];\
    \n  croppingPlanesTexture[1] = tempCrop[0];\
    \n  tempCrop = vec4(0.0, in_croppingPlanes[2], 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[1] /= tempCrop[3];\
    \n  croppingPlanesTexture[2] = tempCrop[1];\
    \n  tempCrop = vec4(0.0, in_croppingPlanes[3], 0.0, 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[1] /= tempCrop[3];\
    \n  croppingPlanesTexture[3] = tempCrop[1];\
    \n  tempCrop = vec4(0.0, 0.0, in_croppingPlanes[4], 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[2] /= tempCrop[3];\
    \n  croppingPlanesTexture[4] = tempCrop[2];\
    \n  tempCrop = vec4(0.0, 0.0, in_croppingPlanes[5], 1.0);\
    \n  tempCrop = datasetToTextureMat * tempCrop;\
    \n  if (tempCrop[3] != 0.0)\
    \n    tempCrop[2] /= tempCrop[3];\
    \n  croppingPlanesTexture[5] = tempCrop[2];"

    \n  // Determine region\
    \n  int regionNo = computeRegion(croppingPlanesTexture, g_dataPos);\
    \n  // Do & operation with cropping flags\
    \n  // Pass the flag that it is Ok to sample or not to sample\
    \n  if (in_croppingFlags[regionNo] == 0)\
    \n    // Skip this voxel\

    \n  int clippingPlanesSize;\
    \n  mat4 textureToObjMat;");
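  // Cropping bookkeeping: computeRegionCoord() classifies the sample as
  // below, inside, or above the min/max cropping planes of one axis, and
  // computeRegion() combines the three axis codes into one of 27 region
  // indices; in_croppingFlags then decides per region whether the voxel is
  // sampled or skipped.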
    vec4 tempClip = in_volumeMatrix * vec4(rayDir, 0.0);\
    \n  if (tempClip.w != 0.0)\
    \n    tempClip = tempClip/tempClip.w;\
    \n    tempClip.w = 1.0;\
    \n  objRayDir = tempClip.xyz;");

    objRayDir = normalize(in_projectionDirection);");
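  // Clipping is evaluated in object space: for a perspective camera the ray
  // direction is taken into object coordinates through in_volumeMatrix, while
  // a parallel projection simply uses the normalized projection direction.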
    \n  clippingPlanesSize = int(in_clippingPlanes[0]);\
    \n  vec4 objDataPos = vec4(0.0);\
    \n  textureToObjMat = in_volumeMatrix * in_textureDatasetMatrix;\
    \n  vec4 terminatePointObj = textureToObjMat * terminatePoint;\
    \n  if (terminatePointObj.w != 0.0)\
    \n    terminatePointObj = terminatePointObj/ terminatePointObj.w ;\
    \n    terminatePointObj.w = 1.0;\
    \n  for (int i = 0; i < clippingPlanesSize; i = i + 6)\
    \n    if (in_useJittering)\
    \n      objDataPos = textureToObjMat * vec4(g_dataPos - g_rayJitter,\
    \n      objDataPos = textureToObjMat * vec4(g_dataPos - g_dirStep, 1.0);\
    \n    if (objDataPos.w != 0.0)\
    \n      objDataPos = objDataPos/objDataPos.w; objDataPos.w = 1.0;\
    \n    vec3 planeOrigin = vec3(in_clippingPlanes[i + 1],\
    \n                            in_clippingPlanes[i + 2],\
    \n                            in_clippingPlanes[i + 3]);\
    \n    vec3 planeNormal = vec3(in_clippingPlanes[i + 4],\
    \n                            in_clippingPlanes[i + 5],\
    \n                            in_clippingPlanes[i + 6]);\
    \n    vec3 normalizedPlaneNormal = normalize(planeNormal);\
    \n    float rayDotNormal = dot(objRayDir, normalizedPlaneNormal);\
    \n    bool frontFace = rayDotNormal > 0;\
    \n    float distance = dot(normalizedPlaneNormal, planeOrigin - objDataPos.xyz);\
    \n    if (frontFace && // Observing from the clipped side (plane's front face)\
    \n        distance > 0.0) // Ray-entry lies on the clipped side.\
    \n      // Scale the point-plane distance to the ray direction and update the\
    \n      float rayScaledDist = distance / rayDotNormal;\
    \n      vec4 newObjDataPos = vec4(objDataPos.xyz + rayScaledDist * objRayDir, 1.0);\
    \n      newObjDataPos = in_inverseTextureDatasetMatrix\
    \n                      * in_inverseVolumeMatrix * vec4(newObjDataPos.xyz, 1.0);\
    \n      if (newObjDataPos.w != 0.0)\
    \n        newObjDataPos /= newObjDataPos.w;\
    \n      if (in_useJittering)\
    \n        g_dataPos = newObjDataPos.xyz + g_rayJitter;\
    \n        g_dataPos = newObjDataPos.xyz + g_dirStep;\
    \n      bool stop = any(greaterThan(g_dataPos, in_texMax)) ||\
    \n                  any(lessThan(g_dataPos, in_texMin));\
    \n      // The ray exits the bounding box before ever intersecting the plane (only\
    \n      // the clipped space is hit).\
    \n      bool behindGeometry = dot(terminatePointObj.xyz - planeOrigin.xyz, normalizedPlaneNormal) < 0.0;\
    \n      if (behindGeometry)\
    \n        // Geometry appears in front of the plane.\
    \n      // Update the number of ray marching steps to account for the clipped entry point (\
    \n      // this is necessary in case the ray hits geometry after marching behind the plane,\
    \n      // given that the number of steps was assumed to be from the not-clipped entry).\
    \n      g_terminatePointMax = length(terminatePoint.xyz - g_dataPos.xyz) /\
    \n                            length(g_dirStep);\

    \n  for (int i = 0; i < clippingPlanesSize && !g_skip; i = i + 6)\
    \n    vec4 objDataPos = textureToObjMat * vec4(g_dataPos, 1.0);\
    \n    if (objDataPos.w != 0.0)\
    \n      objDataPos /= objDataPos.w;\
    \n    vec3 planeOrigin = vec3(in_clippingPlanes[i + 1],\
    \n                            in_clippingPlanes[i + 2],\
    \n                            in_clippingPlanes[i + 3]);\
    \n    vec3 planeNormal = vec3(in_clippingPlanes[i + 4],\
    \n                            in_clippingPlanes[i + 5],\
    \n                            in_clippingPlanes[i + 6]);\
    \n    if (dot(vec3(objDataPos.xyz - planeOrigin), planeNormal) < 0 && dot(objRayDir, planeNormal) < 0)\
2216 if (!mask || !maskInput)
2234 if (!mask || !maskInput ||
2242 \nvec4 maskValue = texture3D(in_mask, g_dataPos);\ 2243 \nif(maskValue.r <= 0.0)\ 2259 if (!mask || !maskInput ||
2267 \nuniform float in_maskBlendFactor;\ 2268 \nuniform sampler2D in_mask1;\ 2269 \nuniform sampler2D in_mask2;" 2283 if (!mask || !maskInput ||
2291 \nvec4 scalar = texture3D(in_volume, g_dataPos);");
2294 if (noOfComponents == 1)
2297 \n scalar.r = scalar.r*in_volume_scale.r + in_volume_bias.r;\ 2298 \n scalar = vec4(scalar.r,scalar.r,scalar.r,scalar.r);" 2305 \n scalar = scalar*in_volume_scale + in_volume_bias;" 2310 \nif (in_maskBlendFactor == 0.0)\ 2312 \n g_srcColor = computeColor(scalar, computeOpacity(scalar));\ 2316 \n float opacity = computeOpacity(scalar);\ 2317 \n // Get the mask value at this same location\ 2318 \n vec4 maskValue = texture3D(in_mask, g_dataPos);\ 2319 \n if(maskValue.r == 0.0)\ 2321 \n g_srcColor = computeColor(scalar, opacity);\ 2325 \n if (maskValue.r == 1.0/255.0)\ 2327 \n g_srcColor = texture2D(in_mask1, vec2(scalar.w,0.0));\ 2331 \n // maskValue.r == 2.0/255.0\ 2332 \n g_srcColor = texture2D(in_mask2, vec2(scalar.w,0.0));\ 2334 \n g_srcColor.a = 1.0;\ 2335 \n if(in_maskBlendFactor < 1.0)\ 2337 \n g_srcColor = (1.0 - in_maskBlendFactor) *\ 2338 \n computeColor(scalar, opacity) +\ 2339 \n in_maskBlendFactor * g_srcColor;\ 2342 \n g_srcColor.a = opacity;\ 2354 \n vec3 l_opaqueFragPos;\ 2355 \n bool l_updateDepth;");
2364 \n l_opaqueFragPos = vec3(-1.0);\ 2365 \n if(in_clampDepthToBackface)\ 2367 \n l_opaqueFragPos = g_dataPos;\ 2369 \n l_updateDepth = true;" 2379 \n if(!g_skip && g_srcColor.a > 0.0 && l_updateDepth)\ 2381 \n l_opaqueFragPos = g_dataPos;\ 2382 \n l_updateDepth = false;\ 2393 \n if (l_opaqueFragPos == vec3(-1.0))\ 2395 \n gl_FragData[1] = vec4(1.0);\ 2399 \n vec4 depthValue = in_projectionMatrix * in_modelViewMatrix *\ 2400 \n in_volumeMatrix * in_textureDatasetMatrix *\ 2401 \n vec4(l_opaqueFragPos, 1.0);\ 2402 \n depthValue /= depthValue.w;\ 2403 \n gl_FragData[1] = vec4(vec3(0.5 * (gl_DepthRange.far -\ 2404 \n gl_DepthRange.near) * depthValue.z + 0.5 *\ 2405 \n (gl_DepthRange.far + gl_DepthRange.near)), 1.0);\ 2416 \n vec3 l_isoPos = g_dataPos;" 2426 \n if(!g_skip && g_srcColor.a > 0.0)\ 2428 \n l_isoPos = g_dataPos;\ 2429 \n g_exit = true; g_skip = true;\ 2440 \n vec4 depthValue = in_projectionMatrix * in_modelViewMatrix *\ 2441 \n in_volumeMatrix * in_textureDatasetMatrix *\ 2442 \n vec4(l_isoPos, 1.0);\ 2443 \n gl_FragData[0] = vec4(l_isoPos, 1.0);\ 2444 \n gl_FragData[1] = vec4(vec3((depthValue.z/depthValue.w) * 0.5 + 0.5),\ 2455 \n initializeRayCast();\ 2456 \n castRay(-1.0, -1.0);\ 2457 \n finalizeRayCast();");
  const size_t usedNames)

  for (size_t i = 0; i < usedNames; i++)
    shader += "uniform sampler2D " + varNames[i] + ";\n";
  const size_t usedNames)

  for (size_t i = 0; i < usedNames; i++)
    std::stringstream ss; ss << i;
    shader += " gl_FragData[" + ss.str() + "] = texture2D("+ varNames[i] +
  shader += " return;\n";
#endif // vtkVolumeShaderComposer_h