本文整理汇总了TypeScript中@tensorflow/tfjs-core.relu函数的典型用法代码示例。如果您正苦于以下问题:TypeScript relu函数的具体用法?TypeScript relu怎么用?TypeScript relu使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了relu函数的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的TypeScript代码示例。
示例1: residual
/**
 * Standard residual unit: conv (with relu) -> conv (no activation),
 * then the identity shortcut is added back and a final relu is applied.
 * Input and output tensors must share the same shape for the add.
 */
export function residual(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D {
  const branch1 = conv(x, params.conv1)
  const branch2 = convNoRelu(branch1, params.conv2)
  const withShortcut = tf.add(branch2, x)
  return tf.relu(withShortcut)
}
示例2: residualDown
/**
 * Residual unit with spatial downsampling. The main branch downsamples via
 * convDown + convNoRelu; the shortcut branch downsamples via 2x2/stride-2
 * average pooling. Shapes are then reconciled before the add:
 * - if spatial dims differ, the main branch is zero-padded by one row and
 *   one column;
 * - if channel counts differ, the shortcut is zero-padded along channels.
 */
export function residualDown(x: tf.Tensor4D, params: ResidualLayerParams): tf.Tensor4D {
  let out = convDown(x, params.conv1)
  out = convNoRelu(out, params.conv2)

  // Shortcut branch: average pool with no padding.
  let pooled = tf.avgPool(x, 2, 2, 'valid') as tf.Tensor4D
  const channelPad = tf.zeros<tf.Rank.R4>(pooled.shape)

  const channelsDiffer = pooled.shape[3] !== out.shape[3]
  const spatialDimsDiffer =
    pooled.shape[1] !== out.shape[1] || pooled.shape[2] !== out.shape[2]

  if (spatialDimsDiffer) {
    // Append one zero row, then (on the already row-padded tensor) one
    // zero column, so the main branch matches the pooled shortcut.
    const rowPadShape = [...out.shape] as [number, number, number, number]
    rowPadShape[1] = 1
    out = tf.concat([out, tf.zeros<tf.Rank.R4>(rowPadShape)], 1)

    const colPadShape = [...out.shape] as [number, number, number, number]
    colPadShape[2] = 1
    out = tf.concat([out, tf.zeros<tf.Rank.R4>(colPadShape)], 2)
  }

  pooled = channelsDiffer ? tf.concat([pooled, channelPad], 3) : pooled
  const withShortcut = tf.add(pooled, out) as tf.Tensor4D
  return tf.relu(withShortcut)
}
示例3: getImageTensor
// Forward pass of the face CNN, wrapped in tf.tidy so every intermediate
// tensor is disposed and only the returned tensor survives.
const outTensor = tf.tidy(() => {
const params = this._params
let imgTensor = getImageTensor(netInput)
// slice(1) drops the leading (batch) dimension; assumes NHWC layout,
// i.e. shape is [batch, height, width, channels] — TODO confirm at caller
const [height, width] = imgTensor.shape.slice(1)
// NOTE(review): side effect — assigns the outer-scope `imageDimensions`
imageDimensions = { width, height }
// work with 128 x 128 sized face images
if (imgTensor.shape[1] !== 128 || imgTensor.shape[2] !== 128) {
imgTensor = tf.image.resizeBilinear(imgTensor, [128, 128])
}
// Convolution / max-pool feature-extraction stack.
let out = conv(imgTensor, params.conv0_params)
out = maxPool(out)
out = conv(out, params.conv1_params)
out = conv(out, params.conv2_params)
out = maxPool(out)
out = conv(out, params.conv3_params)
out = conv(out, params.conv4_params)
out = maxPool(out)
out = conv(out, params.conv5_params)
out = conv(out, params.conv6_params)
// Final pool uses stride [1, 1] (no further spatial reduction from stride).
out = maxPool(out, [1, 1])
out = conv(out, params.conv7_params)
// Flatten to 2D (batch x features) and run two fully connected layers;
// relu is applied only after the first.
const fc0 = tf.relu(fullyConnectedLayer(out.as2D(out.shape[0], -1), params.fc0_params))
const fc1 = fullyConnectedLayer(fc0, params.fc1_params)
return fc1
})
示例4:
// Stride-1 2D convolution plus bias; relu is applied only when the
// enclosing scope's `withRelu` flag is set. tf.tidy disposes the
// intermediate conv result, keeping only the returned tensor.
return tf.tidy(() => {
const out = tf.add(
tf.conv2d(x, params.filters, [1, 1], padding),
params.bias
) as tf.Tensor4D
return withRelu ? tf.relu(out) : out
})
示例5: convLayer
function convLayer(
x: tf.Tensor4D,
params: ConvLayerParams,
strides: [number, number],
withRelu: boolean,
padding: 'valid' | 'same' = 'same'
): tf.Tensor4D {
const { filters, bias } = params.conv
let out = tf.conv2d(x, filters, strides, padding)
out = tf.add(out, bias)
out = scale(out, params.scale)
return withRelu ? tf.relu(out) : out
}
示例6: switch
export let executeOp: OpExecutor = (node: Node, tensorMap: NamedTensorsMap,
context: ExecutionContext):
tfc.Tensor[] => {
switch (node.op) {
case 'abs':
return [tfc.abs(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'acos':
return [tfc.acos(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'acosh':
return [tfc.acosh(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'asin':
return [tfc.asin(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'asinh':
return [tfc.asinh(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'atan':
return [tfc.atan(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'atanh':
return [tfc.atanh(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'ceil':
return [tfc.ceil(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'cos':
return [tfc.cos(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'cosh':
return [tfc.cosh(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'elu':
return [tfc.elu(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'erf':
return [tfc.erf(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'exp':
return [tfc.exp(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'expm1': {
return [tfc.expm1(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'floor':
return [tfc.floor(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'log':
return [tfc.log(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'log1p': {
return [tfc.log1p(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'neg':
return [tfc.neg(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'reciprocal': {
return [tfc.reciprocal(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'relu':
return [tfc.relu(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'round': {
return [tfc.round(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'selu':
return [tfc.selu(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'sigmoid':
return [tfc.sigmoid(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'sin':
return [tfc.sin(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
case 'sign': {
return [tfc.sign(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'sinh': {
return [tfc.sinh(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'softplus': {
return [tfc.softplus(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'sqrt': {
return [tfc.sqrt(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
case 'square': {
return [tfc.square(
getParamValue('x', node, tensorMap, context) as tfc.Tensor)];
}
//.........这里部分代码省略.........