当前位置: 首页>>代码示例>>TypeScript>>正文


TypeScript one_dimensional_texture_access.compute1dTextureFormat函数代码示例

本文整理汇总了TypeScript中neuroglancer/webgl/one_dimensional_texture_access.compute1dTextureFormat函数的典型用法代码示例。如果您正苦于以下问题:TypeScript compute1dTextureFormat函数的具体用法?TypeScript compute1dTextureFormat怎么用?TypeScript compute1dTextureFormat使用的例子?那么, 这里精选的函数代码示例或许可以为您提供帮助。


在下文中一共展示了compute1dTextureFormat函数的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的TypeScript代码示例。

示例1: constructor

 /**
  * Constructs the uncompressed chunk format and initializes its 1-D
  * texture-format fields.
  *
  * @param _gl GL context; unused here beyond matching the expected signature.
  * @param dataType Data type of the chunk's voxel values.
  * @param numChannels Number of channels per voxel.
  * @param key Cache/shader key forwarded to the base class.
  */
 constructor(_gl: GL, public dataType: DataType, public numChannels: number, key: string) {
   super(key);
   // Populates the 1-D texture format fields directly on `this` for the given
   // data type.  NOTE(review): assumed from the call shape (this passed as the
   // format argument) — confirm against one_dimensional_texture_access.ts.
   compute1dTextureFormat(this, dataType);
   // Helper that emits shader code for reading values out of the 'chunkData'
   // texture.  NOTE(review): presumed role based on the name — verify.
   this.textureAccessHelper = new OneDimensionalTextureAccessHelper('chunkData');
 }
开发者ID:stephenplaza,项目名称:neuroglancer,代码行数:5,代码来源:uncompressed_chunk_format.ts

示例2: compute1dTextureLayout

    compute1dTextureLayout(this, gl, /*texelsPerElement=*/1, dataLength);
    let subchunkGridSize = this.subchunkGridSize = vec3.create();
    for (let i = 0; i < 3; ++i) {
      subchunkGridSize[i] = Math.ceil(chunkDataSize[i] / subchunkSize[i]);
    }
  }

  /**
   * Returns a memoized texture layout for the given chunk/subchunk geometry.
   * Layouts with identical chunk size, subchunk size, and data length are
   * shared via the GL context's memoization cache.
   */
  static get(gl: GL, chunkDataSize: vec3, subchunkSize: vec3, dataLength: number) {
    // Cache key uniquely identifies the layout parameters.
    const cacheKey =
        `sliceview.CompressedSegmentationTextureLayout:${vec3Key(chunkDataSize)},` +
        `${vec3Key(subchunkSize)},${dataLength}`;
    const makeLayout = () => new TextureLayout(gl, chunkDataSize, subchunkSize, dataLength);
    return gl.memoize.get(cacheKey, makeLayout);
  }
}

// Module-level texture format for UINT32 data, shared by all chunk-format
// instances in this file.  NOTE(review): compute1dTextureFormat appears to
// populate and return the format object passed as its first argument —
// confirm against one_dimensional_texture_access.ts.
const textureFormat = compute1dTextureFormat(new OneDimensionalTextureFormat(), DataType.UINT32);

export class ChunkFormat extends SingleTextureChunkFormat<TextureLayout> {
  /**
   * Obtains a memoized ChunkFormat for the given parameters.  The shader key
   * depends only on data type and channel count (shader source is identical
   * across subchunk sizes), while the cache key additionally includes the
   * subchunk size so distinct geometries get distinct instances.
   */
  static get(gl: GL, dataType: DataType, subchunkSize: vec3, numChannels: number) {
    const shaderKey = `sliceview.CompressedSegmentationChunkFormat:${dataType}:${numChannels}`;
    const makeFormat = () => new ChunkFormat(dataType, subchunkSize, numChannels, shaderKey);
    return gl.memoize.get(`${shaderKey}:${vec3Key(subchunkSize)}`, makeFormat);
  }

  private textureAccessHelper: OneDimensionalTextureAccessHelper;

  constructor(
      public dataType: DataType, public subchunkSize: vec3, public numChannels: number,
      key: string) {
    super(key);
开发者ID:stephenplaza,项目名称:neuroglancer,代码行数:31,代码来源:chunk_format.ts

示例3: fragmentShaderTest

  // End-to-end test of OneDimensionalTextureAccessHelper: uploads a 1-D array
  // of uint32 values as a texture, reads each element back inside a fragment
  // shader via the generated `readValue` accessor, and verifies every byte of
  // the value against the CPU-side data.  Uses 6 output buffers: 0-3 carry the
  // four raw bytes, 4 carries the low-24-bit little-endian value, 5 carries an
  // in-shader equality flag.
  fragmentShaderTest(6, tester => {
    let {gl, builder} = tester;
    const dataType = DataType.UINT32;
    const numComponents = 1;
    const format = new OneDimensionalTextureFormat();
    const layout = new OneDimensionalTextureLayout();
    compute1dTextureFormat(format, dataType, numComponents);

    // Test data: value at index i is simply i, which makes the expected bytes
    // trivial to compute on the CPU side.  NOTE(review): `dataLength` is
    // defined elsewhere in this spec file — not visible here.
    const data = new Uint32Array(dataLength);
    for (let i = 0; i < data.length; ++i) {
      data[i] = i;
    }

    setLayout(layout, gl, format.texelsPerElement);

    const accessHelper = new OneDimensionalTextureAccessHelper('textureAccess');
    const textureUnitSymbol = Symbol('textureUnit');
    accessHelper.defineShader(builder);
    builder.addUniform('highp float', 'uOffset');
    builder.addUniform('highp vec4', 'uExpected');
    builder.addTextureSampler2D('uSampler', textureUnitSymbol);
    // `readValue` is generated GLSL that fetches element uOffset from the
    // 1-D-mapped texture.
    builder.addFragmentCode(
        accessHelper.getAccessor('readValue', 'uSampler', dataType, numComponents));
    builder.addFragmentCode(glsl_unnormalizeUint8);
    builder.addFragmentCode(glsl_uintleToFloat);
    builder.addOutputBuffer('vec4', 'v4f_fragData0', 0);
    builder.addOutputBuffer('vec4', 'v4f_fragData1', 1);
    builder.addOutputBuffer('vec4', 'v4f_fragData2', 2);
    builder.addOutputBuffer('vec4', 'v4f_fragData3', 3);
    builder.addOutputBuffer('vec4', 'v4f_fragData4', 4);
    builder.addOutputBuffer('vec4', 'v4f_fragData5', 5);
    // Shader main: buffers 4/5 are written from the normalized value before
    // unnormalizeUint8 scales the components up to 0..255 for buffers 0-3.
    builder.setFragmentMain(`
uint32_t value = readValue(uOffset);
v4f_fragData4 = packFloatIntoVec4(uintleToFloat(value.value.xyz));
v4f_fragData5 = packFloatIntoVec4(all(equal(value.value, uExpected)) ? 1.0 : 0.0);
value.value = unnormalizeUint8(value.value);
v4f_fragData0 = packFloatIntoVec4(value.value.x);
v4f_fragData1 = packFloatIntoVec4(value.value.y);
v4f_fragData2 = packFloatIntoVec4(value.value.z);
v4f_fragData3 = packFloatIntoVec4(value.value.w);
`);

    tester.build();
    let {shader} = tester;
    shader.bind();

    accessHelper.setupTextureLayout(gl, shader, layout);

    // Upload the test data as a texture; unbind afterwards so that testOffset
    // below explicitly controls which texture unit/binding is active.
    const textureUnit = shader.textureUnit(textureUnitSymbol);
    let texture = gl.createTexture();
    tester.registerDisposer(() => {
      gl.deleteTexture(texture);
    });
    gl.bindTexture(gl.TEXTURE_2D, texture);
    setOneDimensionalTextureData(gl, layout, format, data);
    gl.bindTexture(gl.TEXTURE_2D, null);

    // Renders one fragment reading element `x` and checks all six outputs.
    function testOffset(x: number) {
      let value = data[x];
      gl.uniform1f(shader.uniform('uOffset'), x);
      gl.uniform4fv(shader.uniform('uExpected'), setVec4FromUint32(new Float32Array(4), value));

      gl.activeTexture(gl.TEXTURE0 + textureUnit);
      gl.bindTexture(gl.TEXTURE_2D, texture);
      tester.execute();
      gl.bindTexture(gl.TEXTURE_2D, null);

      // Buffers 0-3 hold the 4 little-endian bytes of the uint32 value.
      let actual = new Float32Array(4);
      let expected = new Float32Array(4);
      for (let i = 0; i < 4; ++i) {
        actual[i] = tester.readFloat(i);
        expected[i] = (value >>> (8 * i)) & 0xFF;
      }
      for (let i = 0; i < 4; ++i) {
        expect(actual[i]).toBe(
            expected[i],
            `offset = ${x}, value = ${x}, actual = ${Array.from(actual)}, expected = ${
                Array.from(expected)}`);
      }
      // Buffer 4: value reconstructed from the low 3 bytes (uint24 LE); exact
      // equality holds because data[x] = x stays well below 2^24 in practice.
      expect(tester.readFloat(4))
          .toBe(value, `uint24le value != expected, offset = ${x}, value = ${x}`);
      // Buffer 5: the shader-side comparison against uExpected must agree.
      expect(tester.readFloat(5))
          .toBe(1.0, `uExpected != value in shader, offset = ${x}, value = ${x}`);
    }

    // One fixed boundary offset, the first 100 elements, then random probes.
    testOffset(255 /*+ 256 * 256 * 9*/);
    for (let i = 0; i < 100; ++i) {
      testOffset(i);
    }

    const COUNT = 100;
    for (let i = 0; i < COUNT; ++i) {
      let offset = Math.floor(Math.random() * data.length);
      testOffset(offset);
    }
  });
开发者ID:stephenplaza,项目名称:neuroglancer,代码行数:96,代码来源:one_dimensional_texture_access.spec.ts


注:本文中的neuroglancer/webgl/one_dimensional_texture_access.compute1dTextureFormat函数示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。