Java DataEncryptionKeyFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory. If you are wondering what the DataEncryptionKeyFactory class does, how to use it, or what real-world usage looks like, the curated code examples below should help.


The DataEncryptionKeyFactory class belongs to the org.apache.hadoop.hdfs.protocol.datatransfer.sasl package. Seven code examples of the class are shown below, ordered by popularity.
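
For orientation, here is a minimal sketch (not taken from any of the projects listed below; the class name is illustrative) of what an implementation looks like: the interface exposes a single newDataEncryptionKey() factory method, and returning null means data transfer encryption is disabled, as examples 5 and 6 demonstrate.

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;

/**
 * Minimal illustrative DataEncryptionKeyFactory implementation.
 * Returning null signals that data transfer encryption is disabled,
 * mirroring the DataNode examples later in this article.
 */
public class NoOpDataEncryptionKeyFactory implements DataEncryptionKeyFactory {
  @Override
  public DataEncryptionKey newDataEncryptionKey() {
    // A real factory generates a key, e.g. via
    // BlockPoolTokenSecretManager.generateDataEncryptionKey(blockPoolId),
    // as shown in examples 5 and 6 below.
    return null;
  }
}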

Example 1: connectToDN

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
/**
 * Connect to the given datanode's data transfer port, and return
 * the resulting IOStreamPair. This includes encryption wrapping, etc.
 */
public static IOStreamPair connectToDN(DatanodeInfo dn, int timeout,
                                       Configuration conf,
                                       SaslDataTransferClient saslClient,
                                       SocketFactory socketFactory,
                                       boolean connectToDnViaHostname,
                                       DataEncryptionKeyFactory dekFactory,
                                       Token<BlockTokenIdentifier> blockToken)
    throws IOException {

  boolean success = false;
  Socket sock = null;
  try {
    sock = socketFactory.createSocket();
    String dnAddr = dn.getXferAddr(connectToDnViaHostname);
    LOG.debug("Connecting to datanode {}", dnAddr);
    NetUtils.connect(sock, NetUtils.createSocketAddr(dnAddr), timeout);
    sock.setSoTimeout(timeout);

    OutputStream unbufOut = NetUtils.getOutputStream(sock);
    InputStream unbufIn = NetUtils.getInputStream(sock);
    IOStreamPair pair = saslClient.newSocketSend(sock, unbufOut,
        unbufIn, dekFactory, blockToken, dn);

    IOStreamPair result = new IOStreamPair(
        new DataInputStream(pair.in),
        new DataOutputStream(new BufferedOutputStream(pair.out,
            NuCypherExtUtilClient.getSmallBufferSize(conf)))
    );

    success = true;
    return result;
  } finally {
    if (!success) {
      IOUtils.closeSocket(sock);
    }
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 42, Source file: NuCypherExtUtilClient.java
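
A hedged usage sketch for the helper above follows. It is not part of the hadoop-oss project: the method name, the 60-second timeout, and the assumption that the caller already holds conf, saslClient, socketFactory, dekFactory, and blockToken are all illustrative.

import java.io.IOException;
import javax.net.SocketFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.token.Token;

static void transferToDatanode(DatanodeInfo dn, Configuration conf,
    SaslDataTransferClient saslClient, SocketFactory socketFactory,
    DataEncryptionKeyFactory dekFactory,
    Token<BlockTokenIdentifier> blockToken) throws IOException {
  // connectToDN negotiates SASL/encryption and returns buffered data streams.
  IOStreamPair streams = NuCypherExtUtilClient.connectToDN(dn, 60000, conf,
      saslClient, socketFactory, false /* connectToDnViaHostname */,
      dekFactory, blockToken);
  try {
    // streams.out / streams.in are ready for a data transfer request and
    // its response; the actual protocol exchange is omitted in this sketch.
  } finally {
    IOUtils.closeStream(streams.in);
    IOUtils.closeStream(streams.out);
  }
}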

Example 2: peerFromSocketAndKey

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
public static Peer peerFromSocketAndKey(
    SaslDataTransferClient saslClient, Socket s,
    DataEncryptionKeyFactory keyFactory,
    Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
    throws IOException {
  Peer peer = null;
  boolean success = false;
  try {
    peer = peerFromSocket(s);
    peer = saslClient.peerSend(peer, keyFactory, blockToken, datanodeId);
    success = true;
    return peer;
  } finally {
    if (!success) {
      IOUtilsClient.cleanup(null, peer);
    }
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 19, Source file: NuCypherExtUtilClient.java

Example 3: peerFromSocketAndKey

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
public static Peer peerFromSocketAndKey(
      SaslDataTransferClient saslClient, Socket s,
      DataEncryptionKeyFactory keyFactory,
      Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
      throws IOException {
  Peer peer = null;
  boolean success = false;
  try {
    peer = peerFromSocket(s);
    peer = saslClient.peerSend(peer, keyFactory, blockToken, datanodeId);
    success = true;
    return peer;
  } finally {
    if (!success) {
      IOUtils.cleanup(null, peer);
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 19, Source file: TcpPeerServer.java

Example 4: peerFromSocketAndKey

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
public static Peer peerFromSocketAndKey(
      SaslDataTransferClient saslClient, Socket s,
      DataEncryptionKeyFactory keyFactory,
      Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
      throws IOException {
  Peer peer = null;
  boolean success = false;
  try {
    peer = peerFromSocket(s);
    peer = saslClient.peerSend(peer, keyFactory, blockToken, datanodeId);
    success = true;
    return peer;
  } finally {
    if (!success) {
      IOUtilsClient.cleanup(null, peer);
    }
  }
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 19, Source file: DFSUtilClient.java
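
The three peerFromSocketAndKey variants above differ only in the cleanup helper they call on failure (IOUtilsClient.cleanup vs. IOUtils.cleanup). Below is a hedged caller sketch based on the TcpPeerServer variant from example 3; the method name and the connection handling around the call are illustrative, not from the original sources.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import org.apache.hadoop.hdfs.net.Peer;
import org.apache.hadoop.hdfs.net.TcpPeerServer;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.token.Token;

static Peer openSecuredPeer(SaslDataTransferClient saslClient,
    InetSocketAddress dnAddr, DataEncryptionKeyFactory keyFactory,
    Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId,
    int timeoutMs) throws IOException {
  Socket s = new Socket();
  boolean success = false;
  try {
    NetUtils.connect(s, dnAddr, timeoutMs);
    s.setSoTimeout(timeoutMs);
    // peerFromSocketAndKey wraps the socket in a Peer and runs the SASL
    // handshake, cleaning up the peer it created if the handshake fails.
    Peer peer = TcpPeerServer.peerFromSocketAndKey(
        saslClient, s, keyFactory, blockToken, datanodeId);
    success = true;
    return peer;
  } finally {
    if (!success) {
      // Covers the case where connect() failed before a Peer existed;
      // closing an already-closed socket is harmless.
      IOUtils.closeSocket(s);
    }
  }
}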

Example 5: getDataEncryptionKeyFactoryForBlock

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
/**
 * Returns a new DataEncryptionKeyFactory that generates a key from the
 * BlockPoolTokenSecretManager, using the block pool ID of the given block.
 *
 * @param block for which the factory needs to create a key
 * @return DataEncryptionKeyFactory for block's block pool ID
 */
DataEncryptionKeyFactory getDataEncryptionKeyFactoryForBlock(
    final ExtendedBlock block) {
  return new DataEncryptionKeyFactory() {
    @Override
    public DataEncryptionKey newDataEncryptionKey() {
      return dnConf.encryptDataTransfer ?
        blockPoolTokenSecretManager.generateDataEncryptionKey(
          block.getBlockPoolId()) : null;
    }
  };
}
 
Developer: naver, Project: hadoop, Lines: 19, Source file: DataNode.java

Example 6: getDataEncryptionKeyFactoryForBlock

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
/**
 * Returns a new DataEncryptionKeyFactory that generates a key from the
 * BlockPoolTokenSecretManager, using the block pool ID of the given block.
 *
 * @param block for which the factory needs to create a key
 * @return DataEncryptionKeyFactory for block's block pool ID
 */
public DataEncryptionKeyFactory getDataEncryptionKeyFactoryForBlock(
    final ExtendedBlock block) {
  return new DataEncryptionKeyFactory() {
    @Override
    public DataEncryptionKey newDataEncryptionKey() {
      return dnConf.encryptDataTransfer ?
        blockPoolTokenSecretManager.generateDataEncryptionKey(
          block.getBlockPoolId()) : null;
    }
  };
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 19, Source file: DataNode.java

Example 7: initTargetStreams

import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; // import the required package/class
/**
 * Initialize output/input streams for transferring data to the targets
 * and send the block creation request.
 */
private int initTargetStreams(boolean[] targetsStatus) {
  int nsuccess = 0;
  for (int i = 0; i < targets.length; i++) {
    Socket socket = null;
    DataOutputStream out = null;
    DataInputStream in = null;
    boolean success = false;
    try {
      InetSocketAddress targetAddr = 
          getSocketAddress4Transfer(targets[i]);
      socket = datanode.newSocket();
      NetUtils.connect(socket, targetAddr, 
          datanode.getDnConf().getSocketTimeout());
      socket.setSoTimeout(datanode.getDnConf().getSocketTimeout());

      ExtendedBlock block = getBlock(blockGroup, targetIndices[i]);
      Token<BlockTokenIdentifier> blockToken = 
          datanode.getBlockAccessToken(block,
              EnumSet.of(BlockTokenIdentifier.AccessMode.WRITE));

      long writeTimeout = datanode.getDnConf().getSocketWriteTimeout();
      OutputStream unbufOut = NetUtils.getOutputStream(socket, writeTimeout);
      InputStream unbufIn = NetUtils.getInputStream(socket);
      DataEncryptionKeyFactory keyFactory =
        datanode.getDataEncryptionKeyFactoryForBlock(block);
      IOStreamPair saslStreams = datanode.getSaslClient().socketSend(
          socket, unbufOut, unbufIn, keyFactory, blockToken, targets[i]);

      unbufOut = saslStreams.out;
      unbufIn = saslStreams.in;

      out = new DataOutputStream(new BufferedOutputStream(unbufOut,
          DFSUtilClient.getSmallBufferSize(conf)));
      in = new DataInputStream(unbufIn);

      DatanodeInfo source = new DatanodeInfo(datanode.getDatanodeId());
      new Sender(out).writeBlock(block, targetStorageTypes[i], 
          blockToken, "", new DatanodeInfo[]{targets[i]}, 
          new StorageType[]{targetStorageTypes[i]}, source, 
          BlockConstructionStage.PIPELINE_SETUP_CREATE, 0, 0, 0, 0, 
          checksum, cachingStrategy, false, false, null);

      targetSockets[i] = socket;
      targetOutputStreams[i] = out;
      targetInputStreams[i] = in;
      nsuccess++;
      success = true;
    } catch (Throwable e) {
      LOG.warn(e.getMessage());
    } finally {
      if (!success) {
        IOUtils.closeStream(out);
        IOUtils.closeStream(in);
        IOUtils.closeStream(socket);
      }
    }
    targetsStatus[i] = success;
  }
  return nsuccess;
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 65, Source file: ErasureCodingWorker.java


Note: the org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community-contributed open-source projects; copyright of the source code remains with the original authors, and any use or redistribution must follow the corresponding project's License. Do not reproduce without permission.