This article collects typical usage examples of the TypeScript class neuroglancer/util/geom.vec3. If you have been wondering what vec3 is for, how to use it, or what real-world vec3 code looks like, the curated class examples below should help.
Nine code examples of the vec3 class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better TypeScript code examples.
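Before the examples, here is a minimal sketch of the vec3 operations the snippets below rely on. It assumes the gl-matrix-style API that neuroglancer/util/geom exposes (vec3.create, vec3.fromValues, and the out-parameter form of vec3.add / vec3.subtract / vec3.multiply / vec3.min); treat it as an illustration rather than a complete reference.
import {vec3} from 'neuroglancer/util/geom';

// Allocate a zero vector and a vector with explicit components.
const a = vec3.create();              // [0, 0, 0]
const b = vec3.fromValues(4, 8, 16);  // [4, 8, 16]

// Operations write into an explicit "out" vector (the first argument) and also
// return it, so results can be stored without mutating the inputs.
const sum = vec3.add(vec3.create(), a, b);                                  // [4, 8, 16]
const doubled = vec3.multiply(vec3.create(), b, vec3.fromValues(2, 2, 2));  // [8, 16, 32]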
Example 1: parseStackInfo
// Parse a JSON stack description into voxel bounds (vec3), resolution (vec3),
// project name, and channel names.
function parseStackInfo(obj: any): StackInfo|undefined {
  verifyObject(obj);
  let state = verifyObjectProperty(obj, 'state', verifyString);
  let channels: string[] = [];
  let lowerVoxelBound: vec3 = vec3.create();
  let upperVoxelBound: vec3 = vec3.create();
  if (VALID_STACK_STATES.has(state)) {
    let stackStatsObj = verifyObjectProperty(obj, 'stats', verifyObject);
    lowerVoxelBound = parseLowerVoxelBounds(stackStatsObj);
    upperVoxelBound = parseUpperVoxelBounds(stackStatsObj);
    if (stackStatsObj.hasOwnProperty('channelNames')) {
      channels = parseChannelNames(stackStatsObj);
    }
  } else if (PARTIAL_STACK_STATES.has(state)) {
    // Stacks in LOADING state will not have a 'stats' object.
    // Values will be populated from command arguments in MultiscaleVolumeChunkSource().
  } else {
    return undefined;
  }
  let voxelResolution: vec3 = verifyObjectProperty(obj, 'currentVersion', parseStackVersionInfo);
  let project: string = verifyObjectProperty(obj, 'stackId', parseStackProject);
  return {lowerVoxelBound, upperVoxelBound, voxelResolution, project, channels};
}
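The two bounds parsed here delimit the stack in voxel coordinates. As a hedged illustration (not part of this datasource, and with made-up numbers), the stack's extent in voxels follows from vec3.subtract:
// Illustration only: extent = upperVoxelBound - lowerVoxelBound, component-wise.
const lowerVoxelBound = vec3.fromValues(0, 0, 0);
const upperVoxelBound = vec3.fromValues(2048, 2048, 512);
const extent = vec3.subtract(vec3.create(), upperVoxelBound, lowerVoxelBound);  // [2048, 2048, 512]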
Example 2: it
it('parseRGBColorSpecification works', () => {
  expect(parseRGBColorSpecification('white')).toEqual(vec3.fromValues(1, 1, 1));
  expect(parseRGBColorSpecification('black')).toEqual(vec3.fromValues(0, 0, 0));
  expect(parseRGBColorSpecification('red')).toEqual(vec3.fromValues(1, 0, 0));
  expect(parseRGBColorSpecification('lime')).toEqual(vec3.fromValues(0, 1, 0));
  expect(parseRGBColorSpecification('blue')).toEqual(vec3.fromValues(0, 0, 1));
});
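These tests show that parseRGBColorSpecification returns a vec3 with each channel normalized to [0, 1]. A hypothetical helper (not part of neuroglancer) that converts such a color back to 8-bit channels might look like this:
// Hypothetical helper for illustration: map a [0, 1] color vec3 to 8-bit RGB.
function toRGB255(color: vec3): [number, number, number] {
  return [
    Math.round(color[0] * 255),
    Math.round(color[1] * 255),
    Math.round(color[2] * 255),
  ];
}

toRGB255(vec3.fromValues(1, 0, 0));  // [255, 0, 0], i.e. 'red'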
Example 3: stableStringify
return this.scales.map(scaleInfo => {
  return Array
      .from(VolumeChunkSpecification.getDefaults({
        voxelSize: scaleInfo.resolution,
        dataType: this.dataType,
        numChannels: this.numChannels,
        lowerVoxelBound: scaleInfo.voxelOffset,
        upperVoxelBound: vec3.add(vec3.create(), scaleInfo.voxelOffset, scaleInfo.size),
        volumeType: this.volumeType,
        chunkDataSizes: scaleInfo.chunkSizes,
        compressedSegmentationBlockSize: scaleInfo.compressedSegmentationBlockSize
      }))
      .map(spec => {
        let path = `${this.path}/${scaleInfo.key}`;
        let cacheKey = stableStringify({
          'spec': spec,
          'baseUrls': this.baseUrls,
          'path': path,
          'encoding': scaleInfo.encoding
        });
        return chunkManager.getChunkSource(
            VolumeChunkSource, cacheKey,
            () => new VolumeChunkSource(
                chunkManager, spec, this.baseUrls, path, scaleInfo.encoding));
      });
});
Example 4:
return this.scales.map(scaleInfo => {
  return VolumeChunkSpecification
      .getDefaults({
        voxelSize: scaleInfo.resolution,
        dataType: this.dataType,
        numChannels: this.numChannels,
        transform: mat4.fromTranslation(
            mat4.create(),
            vec3.multiply(vec3.create(), scaleInfo.resolution, scaleInfo.voxelOffset)),
        upperVoxelBound: scaleInfo.size,
        volumeType: this.volumeType,
        chunkDataSizes: scaleInfo.chunkSizes,
        baseVoxelOffset: scaleInfo.voxelOffset,
        compressedSegmentationBlockSize: scaleInfo.compressedSegmentationBlockSize,
        volumeSourceOptions,
      })
      .map(spec => this.chunkManager.getChunkSource(PrecomputedVolumeChunkSource, {
        spec,
        parameters: {
          'baseUrls': this.baseUrls,
          'path': `${this.path}/${scaleInfo.key}`,
          'encoding': scaleInfo.encoding
        }
      }));
});
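The transform built here places each scale at its world-space position: the voxel offset is converted to world units by component-wise multiplication with the resolution, then baked into a translation matrix. A hedged sketch with made-up numbers (assuming mat4 comes from the same geom module):
// Illustration only: an offset of 64 voxels at 4x4x40 (nm) resolution becomes a
// translation of [256, 256, 2560] in world units.
const resolution = vec3.fromValues(4, 4, 40);
const voxelOffset = vec3.fromValues(64, 64, 64);
const translation = vec3.multiply(vec3.create(), resolution, voxelOffset);  // [256, 256, 2560]
const transform = mat4.fromTranslation(mat4.create(), translation);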
Example 5: constructor
constructor(response: any) {
  if (typeof response !== 'object' || Array.isArray(response)) {
    throw new Error('Failed to parse volume metadata.');
  }
  this.resolution = parseFiniteVec(vec3.create(), response['resolution']);
  this.voxelOffset = parseIntVec(vec3.create(), response['voxel_offset']);
  this.size = parseIntVec(vec3.create(), response['size']);
  this.chunkSizes = parseArray(response['chunk_sizes'], x => parseFiniteVec(vec3.create(), x));
  if (this.chunkSizes.length === 0) {
    throw new Error('No chunk sizes specified.');
  }
  let encodingStr = response['encoding'];
  let encoding = serverChunkEncodings.get(encodingStr);
  if (encoding === undefined) {
    throw new Error(`Invalid chunk encoding: ${JSON.stringify(encodingStr)}`);
  }
  this.encoding = encoding;
  if (encoding === VolumeChunkEncoding.COMPRESSED_SEGMENTATION) {
    this.compressedSegmentationBlockSize =
        parseIntVec(vec3.create(), response['compressed_segmentation_block_size']);
  }
  this.key = response['key'];
  if (typeof this.key !== 'string') {
    throw new Error('No key specified.');
  }
}
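parseFiniteVec and parseIntVec fill an existing vec3 from a JSON value and validate its contents. As a rough, hypothetical sketch of that idea (not the actual neuroglancer implementation):
// Hypothetical minimal version for illustration: copy three finite numbers from a
// JSON value into the provided vec3, throwing on anything else.
function parseFiniteVec3Sketch(out: vec3, value: any): vec3 {
  if (!Array.isArray(value) || value.length !== 3) {
    throw new Error('Expected a 3-element array.');
  }
  for (let i = 0; i < 3; ++i) {
    const x = Number(value[i]);
    if (!Number.isFinite(x)) {
      throw new Error(`Expected a finite number at index ${i}.`);
    }
    out[i] = x;
  }
  return out;
}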
Example 6: withDefaultCompression
/**
 * Returns a VolumeChunkSpecification with default compression specified if suitable for the
 * volumeType.
 */
static withDefaultCompression(options: VolumeChunkSpecificationDefaultCompressionOptions&
                              VolumeChunkSpecificationOptions&
                              VolumeChunkSpecificationVolumeSourceOptions) {
  let {
    compressedSegmentationBlockSize,
    dataType,
    voxelSize,
    transform,
    lowerVoxelBound,
    upperVoxelBound
  } = options;
  transform = getCombinedTransform(transform, options.volumeSourceOptions);
  if (compressedSegmentationBlockSize === undefined &&
      options.volumeType === VolumeType.SEGMENTATION &&
      (dataType === DataType.UINT32 || dataType === DataType.UINT64)) {
    compressedSegmentationBlockSize = getNearIsotropicBlockSize({
      voxelSize,
      transform,
      lowerVoxelBound,
      upperVoxelBound,
      maxVoxelsPerChunkLog2: 9,
      maxBlockSize: vec3.min(
          vec3.create(), options.chunkDataSize,
          options.maxCompressedSegmentationBlockSize || kInfinityVec),
    });
  }
  return new VolumeChunkSpecification(
      Object.assign({}, options, {compressedSegmentationBlockSize, transform}));
}
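vec3.min takes the component-wise minimum, so kInfinityVec acts as a "no limit" default whenever maxCompressedSegmentationBlockSize is absent. A hedged sketch of the clamping step, with made-up numbers:
// Illustration only: limit a compressed-segmentation block size by the chunk size.
const chunkDataSize = vec3.fromValues(64, 64, 64);
const maxBlockSize = vec3.fromValues(8, 8, 128);
const clamped = vec3.min(vec3.create(), chunkDataSize, maxBlockSize);  // [8, 8, 64]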
Example 7: getSources
getSources(vectorGraphicsSourceOptions: VectorGraphicsSourceOptions) {
  const voxelSize = this.stackInfo.voxelResolution;
  // Each chunk spans the full XY extent of the stack (in world units) but only a
  // single section in Z: the Z chunk size is set to one voxel's worth of resolution.
  const chunkSize = vec3.subtract(
      vec3.create(), this.stackInfo.upperVoxelBound, this.stackInfo.lowerVoxelBound);
  vec3.multiply(chunkSize, chunkSize, voxelSize);
  chunkSize[2] = voxelSize[2];
  const spec = VectorGraphicsChunkSpecification.make({
    voxelSize,
    chunkSize,
    lowerChunkBound: vec3.fromValues(0, 0, this.stackInfo.lowerVoxelBound[2]),
    upperChunkBound: vec3.fromValues(1, 1, this.stackInfo.upperVoxelBound[2]),
    vectorGraphicsSourceOptions
  });
  const source = this.chunkManager.getChunkSource(PointMatchSource, {
    spec,
    parameters: {
      'baseUrls': this.baseUrls,
      'owner': this.ownerInfo.owner,
      'project': this.stackInfo.project,
      'stack': this.stack,
      'encoding': 'points',
      'matchCollection': this.matchCollection,
      'zoffset': this.zoffset
    }
  });
  return [[source]];
}
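A hedged sketch of the resulting chunk geometry, with made-up stack bounds:
// Illustration only: for a stack spanning voxels [0, 0, 1] .. [1000, 2000, 51] at
// 4x4x40 (nm) resolution, each point-match chunk covers the full XY extent in world
// units and a single Z section.
const voxelSize = vec3.fromValues(4, 4, 40);
const chunkSize = vec3.subtract(
    vec3.create(), vec3.fromValues(1000, 2000, 51), vec3.fromValues(0, 0, 1));  // [1000, 2000, 50]
vec3.multiply(chunkSize, chunkSize, voxelSize);  // [4000, 8000, 2000]
chunkSize[2] = voxelSize[2];                     // [4000, 8000, 40]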
Example 8: getStaticAnnotations
getStaticAnnotations() {
  const baseScale = this.scales[0];
  const annotationSet =
      new AnnotationSource(mat4.fromScaling(mat4.create(), baseScale.resolution));
  annotationSet.readonly = true;
  annotationSet.add(makeDataBoundsBoundingBox(
      baseScale.voxelOffset, vec3.add(vec3.create(), baseScale.voxelOffset, baseScale.size)));
  return annotationSet;
}
Example 9: constructor
constructor(options: VolumeChunkSpecificationOptions) {
  let {
    lowerVoxelBound = kZeroVec,
    upperVoxelBound,
    chunkDataSize,
    voxelSize,
    transform,
    baseVoxelOffset = kZeroVec
  } = options;
  // By default the clip bounds are the voxel bounds expressed in world units.
  let {
    lowerClipBound = vec3.multiply(vec3.create(), voxelSize, lowerVoxelBound),
    upperClipBound = vec3.multiply(vec3.create(), voxelSize, upperVoxelBound)
  } = options;
  const chunkSize = vec3.multiply(vec3.create(), chunkDataSize, voxelSize);
  // Convert voxel bounds to chunk-grid bounds: floor for the lower bound and a
  // ceiling-style division for the upper bound, so a partially filled last chunk
  // along each axis is still included.
  let lowerChunkBound = vec3.create();
  let upperChunkBound = vec3.create();
  for (let i = 0; i < 3; ++i) {
    lowerChunkBound[i] = Math.floor(lowerVoxelBound[i] / chunkDataSize[i]);
    upperChunkBound[i] = Math.floor((upperVoxelBound[i] - 1) / chunkDataSize[i] + 1);
  }
  super({voxelSize, transform, lowerChunkBound, upperChunkBound, chunkSize});
  this.baseVoxelOffset = baseVoxelOffset;
  this.lowerClipBound = lowerClipBound;
  this.upperClipBound = upperClipBound;
  this.lowerVoxelBound = lowerVoxelBound;
  this.upperVoxelBound = upperVoxelBound;
  this.chunkDataSize = chunkDataSize;
  let dataType = this.dataType = options.dataType;
  let numChannels = this.numChannels = options.numChannels;
  this.chunkBytes = prod3(chunkDataSize) * DATA_TYPE_BYTES[dataType] * numChannels;
  this.compressedSegmentationBlockSize = options.compressedSegmentationBlockSize;
}
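A hedged sketch of the upper-bound arithmetic with made-up numbers:
// Illustration only: 100 voxels at a chunk size of 64 occupy chunks [0, 2) along that axis.
const chunkDataSize = vec3.fromValues(64, 64, 64);
const upperVoxelBound = vec3.fromValues(100, 128, 64);
const upperChunkBound = vec3.create();
for (let i = 0; i < 3; ++i) {
  upperChunkBound[i] = Math.floor((upperVoxelBound[i] - 1) / chunkDataSize[i] + 1);
}
// upperChunkBound -> [2, 2, 1]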