This article collects typical usage examples of the TypeScript class ShaderBuilder from neuroglancer/webgl/shader. If you are unsure what ShaderBuilder does or how to use it, the nine code examples below should help; they are ordered roughly by popularity.
Example 1: ShaderBuilder
return gl.memoize.get('trivialColorShader', () => {
  let builder = new ShaderBuilder(gl);
  builder.addVarying('vec4', 'vColor');
  builder.setFragmentMain('gl_FragColor = vColor;');
  builder.addAttribute('vec4', 'aVertexPosition');
  builder.addAttribute('vec4', 'aColor');
  builder.addUniform('mat4', 'uProjectionMatrix');
  builder.setVertexMain('vColor = aColor; gl_Position = uProjectionMatrix * aVertexPosition;');
  return builder.build();
});
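Drawing with the memoized shader follows the usual WebGL pattern. Below is a minimal usage sketch, assuming the snippet above is wrapped in a function named after its memoize key and that the built ShaderProgram exposes bind()/uniform()/attribute() lookups; projectionMatrix, vertexBuffer, and vertexCount are placeholders, not part of the original code.

// Hypothetical usage sketch; only the shader variable names come from the example above.
const shader = trivialColorShader(gl);  // assumed wrapper around the memoized builder
shader.bind();
gl.uniformMatrix4fv(shader.uniform('uProjectionMatrix'), false, projectionMatrix);
const aVertexPosition = shader.attribute('aVertexPosition');
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.enableVertexAttribArray(aVertexPosition);
gl.vertexAttribPointer(aVertexPosition, 4, gl.FLOAT, false, 0, 0);
// aColor would be bound the same way from a second buffer.
gl.drawArrays(gl.TRIANGLES, 0, vertexCount);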
Example 2: ShaderBuilder
return gl.memoize.get('trivialUniformColorShader', () => {
  let builder = new ShaderBuilder(gl);
  builder.addUniform('mat4', 'uProjectionMatrix');
  builder.addAttribute('vec4', 'aVertexPosition');
  builder.addUniform('vec4', 'uColor');
  builder.addOutputBuffer('vec4', 'v4f_fragColor', null);
  builder.setFragmentMain('v4f_fragColor = uColor;');
  builder.setVertexMain('gl_Position = uProjectionMatrix * aVertexPosition;');
  return builder.build();
});
Example 3: defineShader
defineShader(builder: ShaderBuilder) {
  super.defineShader(builder);
  builder.addUniform('highp vec3', 'uChunkDataSize');
  // [ 1.0/dataPointsPerTextureWidth, 1.0/numDataPoints ]
  builder.addUniform('highp vec2', 'uUncompressedTextureAccessCoefficients');
  builder.addFragmentCode(`
vec3 getPositionWithinChunk () {
  return floor(min(vChunkPosition * uChunkDataSize, uChunkDataSize - 1.0));
}
vec2 getDataTextureCoords () {
  vec3 chunkDataPosition = getPositionWithinChunk();
  float offset = chunkDataPosition.x + uChunkDataSize.x * (chunkDataPosition.y + uChunkDataSize.y * chunkDataPosition.z);
  return vec2(fract(offset * uUncompressedTextureAccessCoefficients.x),
              offset * uUncompressedTextureAccessCoefficients.y);
}
`);
  switch (this.dataType) {
    case DataType.UINT8:
    case DataType.FLOAT32:
      builder.addFragmentCode(`
float getDataValue () {
  return texture2D(uVolumeChunkSampler, getDataTextureCoords()).x;
}
`);
      break;
    case DataType.UINT32:
      builder.addFragmentCode(glsl_uint64);
      builder.addFragmentCode(`
uint64_t getDataValue () {
  uint64_t value;
  vec2 texCoords = getDataTextureCoords();
  value.low = texture2D(uVolumeChunkSampler, texCoords);
  value.high = vec4(0, 0, 0, 0);
  return value;
}
`);
      break;
    case DataType.UINT64:
      builder.addFragmentCode(glsl_uint64);
      builder.addFragmentCode(`
uint64_t getDataValue () {
  uint64_t value;
  vec2 texCoords = getDataTextureCoords();
  value.low = texture2D(uVolumeChunkSampler, texCoords);
  value.high = texture2D(uVolumeChunkSampler, vec2(texCoords.x + 0.5 * uUncompressedTextureAccessCoefficients.x, texCoords.y));
  return value;
}
`);
      break;
  }
}
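The uUncompressedTextureAccessCoefficients uniform packs the two reciprocals noted in the comment above: the x component selects the column within a texture row, the y component selects the row. A host-side sketch of how those values could be derived, assuming one texel per voxel; dataPointsPerTextureWidth and chunkDataSize are illustrative names, not the actual chunk-format fields.

// Illustrative computation of the coefficients used by getDataTextureCoords().
const numDataPoints = chunkDataSize[0] * chunkDataSize[1] * chunkDataSize[2];
const textureAccessCoefficients = Float32Array.of(
    1.0 / dataPointsPerTextureWidth,
    1.0 / numDataPoints);
// Uploaded with gl.uniform2fv to the 'uUncompressedTextureAccessCoefficients' location.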
Example 4: defineShader
protected defineShader(builder: ShaderBuilder) {
  super.defineShader(builder);
  builder.addUniform('highp float', 'uOpacity');
  builder.addFragmentCode(`
void emitRGBA(vec4 rgba) {
  emit(vec4(rgba.rgb, rgba.a * uOpacity));
}
void emitRGB(vec3 rgb) {
  emit(vec4(rgb, uOpacity));
}
void emitGrayscale(float value) {
  emit(vec4(value, value, value, uOpacity));
}
void emitTransparent() {
  emit(vec4(0.0, 0.0, 0.0, 0.0));
}
`);
  builder.addFragmentCode(glsl_COLORMAPS);
  builder.setFragmentMainFunction(FRAGMENT_MAIN_START + '\n' + this.fragmentMain.value);
}
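The emit* helpers are what user-supplied fragment shader code calls, and this.fragmentMain holds that user code, appended after FRAGMENT_MAIN_START. A sketch of what a typical value might look like; it assumes a toNormalized helper and a getDataValue function (as defined in the other examples) are already part of the fragment code.

// Hypothetical user-editable fragment shader body.
const fragmentMain = `
void main() {
  emitGrayscale(toNormalized(getDataValue()));
}
`;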
Example 5: defineShader
defineShader(builder: ShaderBuilder) {
  builder.addAttribute('highp vec3', 'aSphereVertex');
  builder.addVarying('highp float', 'vLightingFactor');
  // projectionMatrix = cameraMatrix * modelViewMat
  // normalTransformMatrix = (modelViewMat^{-1})^T
  // effective modelViewMat = modelViewMat * scalMat(radii)
  // normalTransformMatrix = ((modelViewMat * scalMat)^{-1})^T
  //                       = (scalMat^{-1} * modelViewMat^{-1})^T
  //                       = (modelViewMat^{-1})^T * (scalMat^{-1})^T
  builder.addVertexCode(`
void emitSphere(mat4 projectionMatrix, mat4 normalTransformMatrix, vec3 centerPosition, vec3 radii, vec4 lightDirection) {
  vec3 vertexPosition = aSphereVertex * radii + centerPosition;
  gl_Position = projectionMatrix * vec4(vertexPosition, 1.0);
  vec3 normal = normalize((normalTransformMatrix * vec4(aSphereVertex / radii, 0.0)).xyz);
  vLightingFactor = abs(dot(normal, uLightDirection.xyz)) + uLightDirection.w;
}
`);
}
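The normalTransformMatrix referenced in the comments is the inverse transpose of the model-view matrix, computed on the host and passed into emitSphere so that non-uniformly scaled spheres keep correct normals. A sketch using the standalone gl-matrix package; modelViewMat is a placeholder for whatever model-view matrix the renderer uses.

import {mat4} from 'gl-matrix';

// Normals must be transformed by the inverse transpose of the model-view
// matrix so that scaling does not skew them.
const normalTransformMatrix = mat4.create();
mat4.invert(normalTransformMatrix, modelViewMat);
mat4.transpose(normalTransformMatrix, normalTransformMatrix);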
Example 6: defineShader
defineShader(builder: ShaderBuilder) {
  super.defineShader(builder);
  let {textureAccessHelper} = this;
  textureAccessHelper.defineShader(builder);
  builder.addFragmentCode(
      textureAccessHelper.getAccessor('readVolumeData', 'uVolumeChunkSampler', this.dataType));
  let {numChannels} = this;
  if (numChannels > 1) {
    builder.addUniform('highp float', 'uChannelStride');
    builder.addFragmentCode(`
float getChannelOffset(int channelIndex) {
  return float(channelIndex) * uChannelStride;
}
`);
  } else {
    builder.addFragmentCode(`float getChannelOffset(int channelIndex) { return 0.0; }`);
  }
  builder.addFragmentCode(`
float getIndexIntoChunk (int channelIndex) {
  vec3 chunkDataPosition = getPositionWithinChunk();
  return chunkDataPosition.x + uChunkDataSize.x * (chunkDataPosition.y + uChunkDataSize.y * chunkDataPosition.z) + getChannelOffset(channelIndex);
}
`);
  const shaderType = getShaderType(this.dataType);
  builder.addFragmentCode(`
${shaderType} getDataValue (int channelIndex) {
  return readVolumeData(getIndexIntoChunk(channelIndex));
}
`);
}
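For multi-channel chunks the stride between channels comes from the host side. A minimal sketch of how that uniform might be populated, under the assumption (not stated in the example) that channels are laid out back to back in the flattened chunk; gl, shader, and chunkDataSize are placeholders.

// Hypothetical setup: with channels stored contiguously, the stride between
// channels is the number of voxels in one channel of the chunk.
const channelStride = chunkDataSize[0] * chunkDataSize[1] * chunkDataSize[2];
gl.uniform1f(shader.uniform('uChannelStride'), channelStride);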
Example 7: defineShader
defineShader(builder: ShaderBuilder) {
  builder.addUniform('highp vec4', 'uColor');
  builder.addUniform('highp vec4', 'uColorSelected');
  builder.addUniform('highp uint', 'uSelectedIndex');
  builder.addVarying('highp vec4', 'vColor');
  // Transform from camera to clip coordinates.
  builder.addUniform('highp mat4', 'uProjection');
  builder.addUniform('highp uint', 'uPickID');
  builder.addVarying('highp uint', 'vPickID', 'flat');
  builder.addVertexCode(`
highp uint getPickBaseOffset() { return uint(gl_InstanceID) * ${this.pickIdsPerInstance}u; }
`);
  builder.addFragmentCode(`
void emitAnnotation(vec4 color) {
  emit(color, vPickID);
}
`);
}
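Each instance reserves pickIdsPerInstance consecutive pick IDs starting at getPickBaseOffset(), so a picked value can be mapped back to the instance and the part of the annotation that was hit. A hedged host-side sketch; it assumes pickedOffset is already relative to this layer's base pick ID.

// Recover which instance and which of its pick slots a picked ID refers to.
function decodePickId(pickedOffset: number, pickIdsPerInstance: number) {
  const instanceIndex = Math.floor(pickedOffset / pickIdsPerInstance);
  const partIndex = pickedOffset % pickIdsPerInstance;
  return {instanceIndex, partIndex};
}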
Example 8: defineShader
defineShader(builder: ShaderBuilder) {
  super.defineShader(builder);
  this.textureAccessHelper.defineShader(builder);
  let local = (x: string) => 'compressedSegmentationChunkFormat_' + x;
  builder.addUniform('highp vec3', 'uSubchunkGridSize');
  builder.addUniform('highp vec3', 'uSubchunkSize');
  builder.addFragmentCode(glsl_getFortranOrderIndexFromNormalized);
  const {dataType} = this;
  const glslType = GLSL_TYPE_FOR_DATA_TYPE.get(dataType);
  if (dataType === DataType.UINT64) {
    builder.addFragmentCode(glsl_uint64);
  } else {
    builder.addFragmentCode(glsl_uint32);
  }
  let fragmentCode = `
vec4 ${local('readTextureValue')}(float offset) {
  vec4 result;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, offset, result);
  return result;
}
float ${local('getChannelOffset')}(int channelIndex) {
  if (channelIndex == 0) {
    return ${this.numChannels}.0;
  }
  vec4 v = ${local('readTextureValue')}(float(channelIndex));
  return v.x * 255.0 + v.y * 255.0 * 256.0 + v.z * 255.0 * 256.0 * 256.0;
}
${glslType} getDataValue (int channelIndex) {
  vec3 chunkPosition = getPositionWithinChunk();
  // TODO: maybe premultiply this and store as uniform.
  vec3 subchunkGridPosition = floor(chunkPosition / uSubchunkSize);
  float subchunkGridOffset = getFortranOrderIndex(subchunkGridPosition, uSubchunkGridSize);
  float channelOffset = ${local('getChannelOffset')}(channelIndex);
  // TODO: Maybe just combine this offset into subchunkGridStrides.
  float subchunkHeaderOffset = subchunkGridOffset * 2.0 + channelOffset;
  vec4 subchunkHeader0 = ${local('readTextureValue')}(subchunkHeaderOffset);
  vec4 subchunkHeader1 = ${local('readTextureValue')}(subchunkHeaderOffset + 1.0);
  float outputValueOffset = dot(subchunkHeader0.xyz, vec3(255, 256 * 255, 256 * 256 * 255)) + channelOffset;
  float encodingBits = subchunkHeader0[3] * 255.0;
  if (encodingBits > 0.0) {
    vec3 subchunkPosition = floor(min(chunkPosition - subchunkGridPosition * uSubchunkSize, uSubchunkSize - 1.0));
    float subchunkOffset = getFortranOrderIndex(subchunkPosition, uSubchunkSize);
    highp float encodedValueBaseOffset = dot(subchunkHeader1.xyz, vec3(255.0, 256.0 * 255.0, 256.0 * 256.0 * 255.0)) + channelOffset;
    highp float encodedValueOffset = floor(encodedValueBaseOffset + subchunkOffset * encodingBits / 32.0);
    vec4 encodedValue = ${local('readTextureValue')}(encodedValueOffset);
    float wordOffset = mod(subchunkOffset * encodingBits, 32.0);
    // If the value is in the first byte, then 0 <= wordOffset < 8.
    // We need to mod by 2**encodingBits.
    float wordShifter = pow(2.0, -wordOffset);
    float encodedValueMod = pow(2.0, encodingBits);
    float encodedValueShifted;
    if (wordOffset < 16.0) {
      encodedValueShifted = dot(encodedValue.xy, vec2(255.0, 255.0 * 256.0));
    } else {
      encodedValueShifted = dot(encodedValue.zw, vec2(255.0 * 256.0 * 256.0, 255.0 * 256.0 * 256.0 * 256.0));
    }
    encodedValueShifted = floor(encodedValueShifted * wordShifter);
    float decodedValue = mod(encodedValueShifted, encodedValueMod);
    outputValueOffset += decodedValue * ${this.dataType === DataType.UINT64 ? '2.0' : '1.0'};
  }
  ${glslType} result;
`;
  if (dataType === DataType.UINT64) {
    fragmentCode += `
  result.low = ${local('readTextureValue')}(outputValueOffset);
  result.high = ${local('readTextureValue')}(outputValueOffset+1.0);
`;
  } else {
    fragmentCode += `
  result.value = ${local('readTextureValue')}(outputValueOffset);
`;
  }
  fragmentCode += `
  return result;
}
`;
  builder.addFragmentCode(fragmentCode);
}
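For reference, the same lookup can be written out on the CPU. The sketch below mirrors the GLSL above; it is not the decoder neuroglancer actually ships, and it assumes the chunk is available as a Uint32Array of little-endian 32-bit words with positions and sizes passed in as plain number triples.

// Illustrative CPU-side mirror of the GLSL getDataValue() above (hypothetical helper).
function getCompressedSegmentationValue(
    data: Uint32Array, chunkPosition: number[], subchunkSize: number[],
    subchunkGridSize: number[], numChannels: number, channelIndex: number,
    is64bit: boolean): {low: number, high: number} {
  // Fortran-order (column-major) flattening, as in getFortranOrderIndex.
  const fortranIndex = (p: number[], size: number[]) =>
      p[0] + size[0] * (p[1] + size[1] * p[2]);
  const gridPos = chunkPosition.map((x, i) => Math.floor(x / subchunkSize[i]));
  const gridOffset = fortranIndex(gridPos, subchunkGridSize);
  // Channel 0 starts immediately after the numChannels-word channel offset table.
  const channelOffset =
      channelIndex === 0 ? numChannels : data[channelIndex] & 0xffffff;
  // Two header words per subchunk: value-table offset plus encoding bits,
  // then the offset of the packed encoded indices.
  const headerOffset = gridOffset * 2 + channelOffset;
  const header0 = data[headerOffset];
  const header1 = data[headerOffset + 1];
  let outputValueOffset = (header0 & 0xffffff) + channelOffset;
  const encodingBits = header0 >>> 24;
  if (encodingBits > 0) {
    const subPos = chunkPosition.map(
        (x, i) => Math.min(x - gridPos[i] * subchunkSize[i], subchunkSize[i] - 1));
    const subOffset = fortranIndex(subPos, subchunkSize);
    const encodedValueBase = (header1 & 0xffffff) + channelOffset;
    const bitOffset = subOffset * encodingBits;
    const word = data[encodedValueBase + Math.floor(bitOffset / 32)];
    const decoded = (word >>> (bitOffset % 32)) % 2 ** encodingBits;
    // uint64 values occupy two words in the value table.
    outputValueOffset += decoded * (is64bit ? 2 : 1);
  }
  return is64bit ?
      {low: data[outputValueOffset], high: data[outputValueOffset + 1]} :
      {low: data[outputValueOffset], high: 0};
}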
Example 9: defineShader
defineShader(builder: ShaderBuilder) {
  super.defineShader(builder);
  this.textureAccessHelper.defineShader(builder);
  let {numChannels} = this;
  if (numChannels > 1) {
    builder.addUniform('highp float', 'uChannelStride');
    builder.addFragmentCode(`
float getChannelOffset(int channelIndex) {
  return float(channelIndex) * uChannelStride;
}
`);
  } else {
    builder.addFragmentCode(`float getChannelOffset(int channelIndex) { return 0.0; }`);
  }
  builder.addFragmentCode(`
float getIndexIntoChunk (int channelIndex) {
  vec3 chunkDataPosition = getPositionWithinChunk();
  return chunkDataPosition.x + uChunkDataSize.x * (chunkDataPosition.y + uChunkDataSize.y * chunkDataPosition.z) + getChannelOffset(channelIndex);
}
`);
  switch (this.dataType) {
    case DataType.UINT8:
      builder.addFragmentCode(glsl_uint8);
      builder.addFragmentCode(`
uint8_t getDataValue (int channelIndex) {
  uint8_t result;
  vec4 temp;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, getIndexIntoChunk(channelIndex), temp);
  result.value = temp.x;
  return result;
}
`);
      break;
    case DataType.FLOAT32:
      builder.addFragmentCode(glsl_float);
      builder.addFragmentCode(`
float getDataValue (int channelIndex) {
  vec4 temp;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, getIndexIntoChunk(channelIndex), temp);
  return temp.x;
}
`);
      break;
    case DataType.UINT16:
      builder.addFragmentCode(glsl_uint16);
      builder.addFragmentCode(`
uint16_t getDataValue (int channelIndex) {
  uint16_t result;
  vec4 temp;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, getIndexIntoChunk(channelIndex), temp);
  result.value = temp.xw;
  return result;
}
`);
      break;
    case DataType.UINT32:
      builder.addFragmentCode(glsl_uint32);
      builder.addFragmentCode(`
uint32_t getDataValue (int channelIndex) {
  uint32_t result;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, getIndexIntoChunk(channelIndex), result.value);
  return result;
}
`);
      break;
    case DataType.UINT64:
      builder.addFragmentCode(glsl_uint64);
      builder.addFragmentCode(`
uint64_t getDataValue (int channelIndex) {
  uint64_t result;
  ${this.textureAccessHelper.readTextureValue}(uVolumeChunkSampler, getIndexIntoChunk(channelIndex), result.low, result.high);
  return result;
}
`);
      break;
  }
}