This article collects typical usage examples of the neuroglancer/util/geom.vec3.fromValues method in TypeScript. If you are wondering what exactly vec3.fromValues does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the containing class, neuroglancer/util/geom.vec3.
The following presents 10 code examples of the vec3.fromValues method, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better TypeScript code examples.
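Before the examples, a minimal sketch of the method itself: vec3 in neuroglancer/util/geom follows the gl-matrix convention, and fromValues(x, y, z) builds a new three-component vector (backed by a Float32Array) from explicit values. The numbers below are made up for illustration.

import {vec3} from 'neuroglancer/util/geom';

// Construct vectors directly from components.
const voxelSize = vec3.fromValues(4, 4, 40);          // nm per voxel (hypothetical)
const voxelBound = vec3.fromValues(2048, 2048, 256);  // voxels (hypothetical)

// Other vec3 helpers operate on existing vectors; fromValues is simply the
// one-call way to create one.
const physicalExtent = vec3.multiply(vec3.create(), voxelBound, voxelSize);
// physicalExtent -> Float32Array [8192, 8192, 10240]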
Example 1: getSources
getSources(vectorGraphicsSourceOptions: VectorGraphicsSourceOptions) {
  const voxelSize = this.stackInfo.voxelResolution;
  const chunkSize = vec3.subtract(
      vec3.create(), this.stackInfo.upperVoxelBound, this.stackInfo.lowerVoxelBound);
  vec3.multiply(chunkSize, chunkSize, voxelSize);
  chunkSize[2] = voxelSize[2];
  const spec = VectorGraphicsChunkSpecification.make({
    voxelSize,
    chunkSize,
    lowerChunkBound: vec3.fromValues(0, 0, this.stackInfo.lowerVoxelBound[2]),
    upperChunkBound: vec3.fromValues(1, 1, this.stackInfo.upperVoxelBound[2]),
    vectorGraphicsSourceOptions
  });
  const source = this.chunkManager.getChunkSource(PointMatchSource, {
    spec,
    parameters: {
      'baseUrls': this.baseUrls,
      'owner': this.ownerInfo.owner,
      'project': this.stackInfo.project,
      'stack': this.stack,
      'encoding': 'points',
      'matchCollection': this.matchCollection,
      'zoffset': this.zoffset
    }
  });
  return [[source]];
}
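In this example, chunkSize is the full physical x/y extent of the stack and a single section thick in z, so the chunk grid is one chunk wide in x and y with one chunk per z section; that is why the chunk bounds are (0, 0, zLower) and (1, 1, zUpper). A small sketch of the same arithmetic with made-up numbers:

import {vec3} from 'neuroglancer/util/geom';

// Hypothetical stack: 1000 x 2000 voxels, sections 10..60, at 4 x 4 x 40 nm.
const voxelSize = vec3.fromValues(4, 4, 40);
const lowerVoxelBound = vec3.fromValues(0, 0, 10);
const upperVoxelBound = vec3.fromValues(1000, 2000, 60);

const chunkSize = vec3.subtract(vec3.create(), upperVoxelBound, lowerVoxelBound);
vec3.multiply(chunkSize, chunkSize, voxelSize);  // full x/y extent: 4000 x 8000 nm
chunkSize[2] = voxelSize[2];                     // one section thick in z: 40 nm

const lowerChunkBound = vec3.fromValues(0, 0, lowerVoxelBound[2]);  // (0, 0, 10)
const upperChunkBound = vec3.fromValues(1, 1, upperVoxelBound[2]);  // (1, 1, 60)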
Example 2: it
it('parseRGBColorSpecification works', () => {
  expect(parseRGBColorSpecification('white')).toEqual(vec3.fromValues(1, 1, 1));
  expect(parseRGBColorSpecification('black')).toEqual(vec3.fromValues(0, 0, 0));
  expect(parseRGBColorSpecification('red')).toEqual(vec3.fromValues(1, 0, 0));
  expect(parseRGBColorSpecification('lime')).toEqual(vec3.fromValues(0, 1, 0));
  expect(parseRGBColorSpecification('blue')).toEqual(vec3.fromValues(0, 0, 1));
});
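The parsed colors above are normalized to the range 0..1. As a rough sketch of the same convention (rgb255ToVec3 is a made-up helper, not part of neuroglancer), an 8-bit RGB triple maps onto a vec3 like this:

import {vec3} from 'neuroglancer/util/geom';

// Hypothetical helper: normalize 8-bit RGB components into the 0..1 vec3
// form that parseRGBColorSpecification returns.
function rgb255ToVec3(r: number, g: number, b: number): vec3 {
  return vec3.fromValues(r / 255, g / 255, b / 255);
}

rgb255ToVec3(255, 0, 0);  // same components as parseRGBColorSpecification('red')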
Example 3: Error
.then(header => {
  let dataTypeInfo = DATA_TYPE_CONVERSIONS.get(header.datatypeCode);
  if (dataTypeInfo === undefined) {
    throw new Error(
        `Unsupported data type: ${NiftiDataType[header.datatypeCode] ||
        header.datatypeCode}.`);
  }
  if (header.dims[4] !== 1) {
    throw new Error(`Time series data not supported.`);
  }
  const spatialUnits = header.xyzt_units & NIFTI1.SPATIAL_UNITS_MASK;
  let unitsPerNm = 1;
  switch (spatialUnits) {
    case NIFTI1.UNITS_METER:
      unitsPerNm = 1e9;
      break;
    case NIFTI1.UNITS_MM:
      unitsPerNm = 1e6;
      break;
    case NIFTI1.UNITS_MICRON:
      unitsPerNm = 1e3;
      break;
  }
  const {quatern_b, quatern_c, quatern_d} = header;
  const quatern_a = Math.sqrt(
      1.0 - quatern_b * quatern_b - quatern_c * quatern_c - quatern_d * quatern_d);
  const qfac = header.pixDims[0] === -1 ? -1 : 1;
  let info: NiftiVolumeInfo = {
    description: header.description,
    affine: convertAffine(header.affine),
    dataType: dataTypeInfo.dataType,
    numChannels: header.dims[5],
    volumeType: dataTypeInfo.volumeType,
    voxelSize: vec3.fromValues(
        unitsPerNm * header.pixDims[1], unitsPerNm * header.pixDims[2],
        unitsPerNm * header.pixDims[3]),
    volumeSize: vec3.fromValues(header.dims[1], header.dims[2], header.dims[3]),
    qoffset: vec3.fromValues(
        unitsPerNm * header.qoffset_x, unitsPerNm * header.qoffset_y,
        unitsPerNm * header.qoffset_z),
    qform_code: header.qform_code,
    sform_code: header.sform_code,
    qfac: qfac,
    quatern: quat.fromValues(quatern_b, quatern_c, quatern_d, quatern_a),
  };
  return {value: info};
});
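The switch picks the number of nanometers per stored unit (1 m = 1e9 nm, 1 mm = 1e6 nm, 1 µm = 1e3 nm), so voxelSize and qoffset always end up in nanometers regardless of the units declared in the NIfTI header. A minimal sketch of that conversion, using made-up header values rather than a parsed NIFTI1 header:

import {vec3} from 'neuroglancer/util/geom';

// Hypothetical header: millimeter units, 0.5 x 0.5 x 1.2 mm voxels.
const nmPerUnit = 1e6;               // the UNITS_MM case above
const pixDims = [1, 0.5, 0.5, 1.2];  // pixDims[0] is qfac; spatial dims start at index 1

const voxelSize = vec3.fromValues(
    nmPerUnit * pixDims[1], nmPerUnit * pixDims[2], nmPerUnit * pixDims[3]);
// voxelSize -> Float32Array [500000, 500000, 1200000] (nm)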
Example 4: parsePositionString
export function parsePositionString(s: string): vec3|undefined {
  const match = s.match(
      /^[\[\]{}()\s,]*(\d+(?:\.\d+)?)[,\s]+(\d+(?:\.\d+)?)[,\s]+(\d+(?:\.\d+)?)[\[\]{}()\s,]*$/);
  if (match !== null) {
    return vec3.fromValues(parseFloat(match[1]), parseFloat(match[2]), parseFloat(match[3]));
  }
  return undefined;
}
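A short usage sketch: the function returns undefined instead of throwing on malformed input, so call sites guard on the result (the console.log here is just for illustration).

const position = parsePositionString('4000, 5000, 120');
if (position !== undefined) {
  console.log(position);  // Float32Array [4000, 5000, 120]
}
parsePositionString('not a position');  // -> undefined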
Example 5: it
it('getMultiscaleChunksToDraw simple', () => {
  const manifest: MultiscaleMeshManifest = {
    chunkShape: vec3.fromValues(10, 20, 30),
    chunkGridSpatialOrigin: vec3.fromValues(5, 6, -50),
    clipLowerBound: vec3.fromValues(20, 23, -50),
    clipUpperBound: vec3.fromValues(40, 45, -20),
    lodScales: [20, 40],
    chunkCoordinates: Uint32Array.from([
      0, 0, 0,  //
    ]),
  };
  const viewportWidth = 640;
  const viewportHeight = 480;
  const modelViewProjection =
      mat4.perspective(mat4.create(), Math.PI / 2, viewportWidth / viewportHeight, 5, 100);
  expect(getChunkList(
             manifest, modelViewProjection, /*detailCutoff=*/ 1000, viewportWidth, viewportHeight))
      .toEqual([{
        lod: 1,
        renderScale: 960,
        beginIndex: 0,
        endIndex: 1,
      }]);
  expect(getChunkList(
             manifest, modelViewProjection, /*detailCutoff=*/ 800, viewportWidth, viewportHeight))
      .toEqual([
        {
          lod: 1,
          renderScale: 960,
          beginIndex: 0,
          endIndex: 1,
        },
        {
          lod: 0,
          renderScale: 480,
          beginIndex: 0,
          endIndex: 1,
        }
      ]);
});
Example 6: it
it('works on basic cases', () => {
  expect(parsePositionString('10 2 3')).toEqual(vec3.fromValues(10, 2, 3));
  expect(parsePositionString('[1 2 3')).toEqual(vec3.fromValues(1, 2, 3));
  expect(parsePositionString('[1, 2, 3,')).toEqual(vec3.fromValues(1, 2, 3));
  expect(parsePositionString('[1, 2, 3]')).toEqual(vec3.fromValues(1, 2, 3));
  expect(parsePositionString('1.2 2.4 3')).toEqual(vec3.fromValues(1.2, 2.4, 3));
  expect(parsePositionString('{200, 400, 500}')).toEqual(vec3.fromValues(200, 400, 500));
});
Example 7: it
it('getNearIsotropicBlockSize', () => {
  expect(
      getNearIsotropicBlockSize({voxelSize: vec3.fromValues(1, 1, 1), maxVoxelsPerChunkLog2: 18}))
      .toEqual(vec3.fromValues(64, 64, 64));
  expect(
      getNearIsotropicBlockSize({voxelSize: vec3.fromValues(2, 1, 1), maxVoxelsPerChunkLog2: 17}))
      .toEqual(vec3.fromValues(32, 64, 64));
  expect(
      getNearIsotropicBlockSize({voxelSize: vec3.fromValues(3, 3, 30), maxVoxelsPerChunkLog2: 9}))
      .toEqual(vec3.fromValues(16, 16, 2));
  expect(getNearIsotropicBlockSize({
    voxelSize: vec3.fromValues(3, 3, 30),
    upperVoxelBound: vec3.fromValues(1, 128, 128),
    maxVoxelsPerChunkLog2: 8
  })).toEqual(vec3.fromValues(1, 64, 4));
});
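The expected values in this test follow from a simple greedy rule: start from a 1x1x1 block and repeatedly double the dimension whose physical extent (block size times voxel size) is currently smallest, staying within the 2^maxVoxelsPerChunkLog2 voxel budget and any upperVoxelBound cap. The sketch below illustrates that rule; it is not neuroglancer's actual implementation, but it reproduces the expectations above.

import {vec3} from 'neuroglancer/util/geom';

// Illustration only: greedy selection of a near-isotropic block size.
function nearIsotropicBlockSizeSketch(
    voxelSize: vec3, maxVoxelsPerChunkLog2: number, upperVoxelBound?: vec3): vec3 {
  const size = vec3.fromValues(1, 1, 1);
  for (let step = 0; step < maxVoxelsPerChunkLog2; ++step) {
    let best = -1;
    for (let i = 0; i < 3; ++i) {
      if (upperVoxelBound !== undefined && size[i] * 2 > upperVoxelBound[i]) continue;
      if (best === -1 || size[i] * voxelSize[i] < size[best] * voxelSize[best]) best = i;
    }
    if (best === -1) break;  // every dimension is capped
    size[best] *= 2;
  }
  return size;
}

nearIsotropicBlockSizeSketch(vec3.fromValues(3, 3, 30), 9);  // -> [16, 16, 2]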
Example 8: describe
describe('encodeChannels', () => {
  it('basic 1-channel 1-block', () => {
    const blockSize = [2, 2, 1];
    const input = Uint32Array.of(
        4, 4, 4, 4  //
    );
    const volumeSize = [2, 2, 1, 1];
    const output = new Uint32ArrayBuilder();
    encodeChannels(output, blockSize, input, volumeSize);
    expect(output.view)
        .toEqual(Uint32Array.of(
            1,       //
            2, 2, 4  //
            ));
  });
  for (let blockSize of [vec3.fromValues(2, 2, 2), vec3.fromValues(8, 4, 1)]) {
    for (let volumeSize of [  //
             [1, 2, 1, 1],    //
             [1, 2, 1, 3],    //
             [2, 2, 2, 1],    //
             [2, 2, 2, 3],    //
             [4, 4, 5, 3],    //
    ]) {
      it(`round trip ${volumeSize.join(',')} with blockSize ${vec3Key(blockSize)}`, () => {
        const numPossibleValues = 15;
        const input = makeRandomUint32Array(prod4(volumeSize), numPossibleValues);
        const output = new Uint32ArrayBuilder();
        encodeChannels(output, blockSize, input, volumeSize);
        const decoded = new Uint32Array(input.length);
        decodeChannels(decoded, output.view, 0, volumeSize, blockSize);
        expect(decoded).toEqual(input);
      });
    }
  }
});
Example 9: getSources
getSources(volumeSourceOptions: VolumeSourceOptions) {
  let sources: VolumeChunkSource[][] = [];
  let numLevels = this.numLevels;
  if (numLevels === undefined) {
    numLevels = computeStackHierarchy(this.stackInfo, this.dims[0]);
  }
  for (let level = 0; level < numLevels; level++) {
    let voxelSize = vec3.clone(this.stackInfo.voxelResolution);
    let chunkDataSize = vec3.fromValues(1, 1, 1);
    // tiles are NxMx1
    for (let i = 0; i < 2; ++i) {
      voxelSize[i] = voxelSize[i] * Math.pow(2, level);
      chunkDataSize[i] = this.dims[i];
    }
    let lowerVoxelBound = vec3.create(), upperVoxelBound = vec3.create();
    for (let i = 0; i < 3; i++) {
      lowerVoxelBound[i] = Math.floor(
          this.stackInfo.lowerVoxelBound[i] * (this.stackInfo.voxelResolution[i] / voxelSize[i]));
      upperVoxelBound[i] = Math.ceil(
          this.stackInfo.upperVoxelBound[i] * (this.stackInfo.voxelResolution[i] / voxelSize[i]));
    }
    let spec = VolumeChunkSpecification.make({
      voxelSize,
      chunkDataSize,
      numChannels: this.numChannels,
      dataType: this.dataType, lowerVoxelBound, upperVoxelBound, volumeSourceOptions,
    });
    let source = TileChunkSource.get(this.chunkManager, spec, {
      'baseUrls': this.baseUrls,
      'owner': this.ownerInfo.owner,
      'project': this.stackInfo.project,
      'stack': this.stack,
      'encoding': this.encoding,
      'level': level,
      'dims': `${this.dims[0]}_${this.dims[1]}`,
    });
    sources.push([source]);
  }
  return sources;
}
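Each pyramid level doubles the in-plane voxel size while leaving z untouched (the tiles are NxMx1), and the voxel bounds are rescaled into the coarser grid by the ratio of base to level resolution. A hedged illustration of that arithmetic with made-up numbers:

import {vec3} from 'neuroglancer/util/geom';

// Hypothetical stack: 4 x 4 x 40 nm base resolution, 100000 voxels wide, level 2.
const level = 2;
const baseVoxelSize = vec3.fromValues(4, 4, 40);
const voxelSize = vec3.clone(baseVoxelSize);
for (let i = 0; i < 2; ++i) {
  voxelSize[i] *= Math.pow(2, level);  // 4 nm -> 16 nm in x/y at level 2
}
const upperX = Math.ceil(100000 * (baseVoxelSize[0] / voxelSize[0]));  // 25000 voxels at level 2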
Example 10: decodeChunk
function decodeChunk(chunk: VolumeChunk, response: ArrayBuffer, encoding: VolumeChunkEncoding) {
  const dv = new DataView(response);
  const mode = dv.getUint16(0, /*littleEndian=*/ false);
  if (mode !== 0) {
    throw new Error(`Unsupported mode: ${mode}.`);
  }
  const numDimensions = dv.getUint16(2, /*littleEndian=*/ false);
  if (numDimensions !== 3) {
    throw new Error(`Number of dimensions must be 3.`);
  }
  let offset = 4;
  const shape = new Uint32Array(numDimensions);
  for (let i = 0; i < numDimensions; ++i) {
    shape[i] = dv.getUint32(offset, /*littleEndian=*/ false);
    offset += 4;
  }
  chunk.chunkDataSize = vec3.fromValues(shape[0], shape[1], shape[2]);
  let buffer = new Uint8Array(response, offset);
  if (encoding === VolumeChunkEncoding.GZIP) {
    buffer = inflate(buffer);
  }
  decodeRawChunk(chunk, buffer.buffer, Endianness.BIG, buffer.byteOffset, buffer.byteLength);
}
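The header this decoder expects is a big-endian uint16 mode (must be 0), a big-endian uint16 dimension count (must be 3), three big-endian uint32 shape values, and then the raw (optionally gzip-compressed) voxel payload. A small sketch that assembles such a buffer, e.g. for a test fixture (makeTestChunkBuffer is a made-up name, not part of neuroglancer):

// Build a minimal big-endian header for a shape[0] x shape[1] x shape[2] chunk,
// followed by `data` as the uncompressed payload.
function makeTestChunkBuffer(shape: [number, number, number], data: Uint8Array): ArrayBuffer {
  const buffer = new ArrayBuffer(4 + 4 * 3 + data.byteLength);
  const dv = new DataView(buffer);
  dv.setUint16(0, 0, /*littleEndian=*/ false);  // mode = 0
  dv.setUint16(2, 3, /*littleEndian=*/ false);  // numDimensions = 3
  let offset = 4;
  for (const s of shape) {
    dv.setUint32(offset, s, /*littleEndian=*/ false);
    offset += 4;
  }
  new Uint8Array(buffer, offset).set(data);
  return buffer;
}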