

Java FSDataInputStream.readInt Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.fs.FSDataInputStream.readInt, drawn from open-source projects. If you are wondering what FSDataInputStream.readInt does, how to use it, or what it looks like in practice, the curated code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.fs.FSDataInputStream.


The following presents 15 code examples of the FSDataInputStream.readInt method, sorted by popularity by default.
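Before the examples, a minimal self-contained sketch of the basic pattern: per the java.io.DataInput contract, readInt consumes four bytes in big-endian order and throws EOFException if the stream ends first. The file path and the two fields read here are purely illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadIntSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/example.bin"); // hypothetical file
    FileSystem fs = path.getFileSystem(conf);
    // try-with-resources closes the stream even if a read fails
    try (FSDataInputStream in = fs.open(path)) {
      int magic = in.readInt(); // first 4 bytes, big-endian
      int count = in.readInt(); // next 4 bytes
      System.out.println("magic=" + magic + ", count=" + count);
    }
  }
}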

Example 1: getModelFormat

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
/**
 * Read model row type
 *
 * @param modelDir model save directory
 * @return row type 0:sparse double, 1:dense double, 2:sparse int, 3:dense int, 4:dense float,
 * 5:sparse float, 7:sparse long key double
 */
public static int getModelFormat(String modelDir) throws IOException {
  Configuration conf = new Configuration();
  Path meteFilePath = new Path(new Path(modelDir), "meta");

  FileSystem fs = meteFilePath.getFileSystem(conf);
  if (!fs.exists(meteFilePath)) {
    throw new IOException("matrix meta file does not exist ");
  }

  FSDataInputStream input = fs.open(meteFilePath);

  try {
    input.readInt();
    input.readUTF();
    return input.readInt();
  } finally {
    input.close();
  }
}
 
Developer: Tencent, Project: angel, Lines: 27, Source: ModelLoader.java
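A hypothetical call site for the method above, mapping a few of the documented row-type codes to names; the model directory path is illustrative, and the caller must handle the declared IOException.

// Hypothetical usage; the HDFS path is illustrative
int format = ModelLoader.getModelFormat("hdfs://ns1/models/lr"); // throws IOException
switch (format) {
  case 0: System.out.println("sparse double"); break;
  case 1: System.out.println("dense double"); break;
  case 7: System.out.println("sparse long-key double"); break;
  default: System.out.println("other row type: " + format);
}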

Example 2: loadSparseDoublePartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadSparseDoublePartition(SparseDoubleModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2DoubleOpenHashMap row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readDouble());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 18, Source: ModelLoader.java
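The loader above implies the partition's on-disk layout: a row count, then for each row its id, a non-zero count, and that many (column, value) pairs. The writer below is a sketch inferred from that reader, not Angel's actual save code; it exists only to make the layout explicit. The same framing, with the float, int, or long variants of the read calls swapped in, covers Examples 3 through 5.

import it.unimi.dsi.fastutil.ints.Int2DoubleMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import org.apache.hadoop.fs.FSDataOutputStream;
import java.io.IOException;

// Sketch of the layout loadSparseDoublePartition expects (assumed, not
// Angel's real writer): rowNum, then per row: rowId, nnz, nnz pairs
static void writeSparseDoublePartition(FSDataOutputStream out,
    int[] rowIds, Int2DoubleOpenHashMap[] rows) throws IOException {
  out.writeInt(rowIds.length);                // rowNum
  for (int i = 0; i < rowIds.length; i++) {
    out.writeInt(rowIds[i]);                  // rowId
    out.writeInt(rows[i].size());             // nnz
    for (Int2DoubleMap.Entry e : rows[i].int2DoubleEntrySet()) {
      out.writeInt(e.getIntKey());            // column index
      out.writeDouble(e.getDoubleValue());    // value
    }
  }
}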

Example 3: loadSparseFloatPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadSparseFloatPartition(SparseFloatModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2FloatOpenHashMap row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readFloat());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 18, Source: ModelLoader.java

Example 4: loadSparseIntPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadSparseIntPartition(SparseIntModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2IntOpenHashMap row = null;

  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readInt());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: ModelLoader.java

Example 5: loadSparseDoubleLongKeyPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadSparseDoubleLongKeyPartition(SparseDoubleLongKeyModel model,
    FSDataInputStream input, ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Long2DoubleOpenHashMap row = null;

  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readLong(), input.readDouble());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: ModelLoader.java

Example 6: deserializeSpatialPartitioner

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private SpatialPartitioner deserializeSpatialPartitioner(
        String spatialPartitionerFilePath) {
    String hdfsPath = getDirectoryId()+spatialPartitionerFilePath;

    FileSystem hdfs = KiteInstance.hdfs();
    try (FSDataInputStream block = hdfs.open(new Path(hdfsPath),
            ConstantsAndDefaults.BUFFER_SIZE_BYTES)) {

        int len = block.readInt();
        byte[] serializedData = new byte[len];

        // read() may return fewer than len bytes; readFully fills the
        // whole buffer or throws EOFException, handled by the catch below
        block.readFully(serializedData, 0, len);

        return SpatialPartitioner.deserialize(serializedData);
    } catch (IOException e) {
        String errMsg = "Unable to read HDFS path "+hdfsPath;
        errMsg += System.lineSeparator();
        errMsg += "Error: "+e.getMessage();
        KiteInstance.logError(errMsg);
        System.err.println(errMsg);
        return null;
    }
}
 
Developer: amrmagdy4, Project: kite, Lines: 27, Source: DiskSpatialIndexSegment.java
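The reader above expects a single length-prefixed byte array. Below is a sketch of the matching write side; it reuses hdfs and hdfsPath from the example and assumes, hypothetically, that SpatialPartitioner exposes a serialize() method returning byte[].

// Hypothetical write-side counterpart of the length-prefixed framing;
// partitioner.serialize() returning byte[] is an assumption here
byte[] data = partitioner.serialize();
try (FSDataOutputStream out = hdfs.create(new Path(hdfsPath))) {
    out.writeInt(data.length); // 4-byte prefix read back via readInt()
    out.write(data);           // payload passed to deserialize(byte[])
}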

Example 7: loadDenseDoublePartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadDenseDoublePartition(DenseDoubleModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  double[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readDouble();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 8: loadSparseDoubleRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
public static Int2DoubleOpenHashMap loadSparseDoubleRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Int2DoubleOpenHashMap row = new Int2DoubleOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readDouble());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java
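Example 8 illustrates the random-access pattern: the partition meta records a byte offset per row, seek() jumps there, and the first readInt is checked against the requested row id to guard against a wrong offset. A hypothetical call, where fs, modelDir, and partMeta are assumed to be set up elsewhere and the partition file name is illustrative:

// Hypothetical single-row read; file name and partMeta are illustrative
try (FSDataInputStream in = fs.open(new Path(modelDir, "partition_0"))) {
  Int2DoubleOpenHashMap row =
      ModelLoader.loadSparseDoubleRowFromPartition(in, partMeta, 5);
  System.out.println("row 5 nnz = " + row.size());
}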

Example 9: loadDenseFloatPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadDenseFloatPartition(DenseFloatModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  float[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readFloat();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 10: loadSparseFloatRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
public static Int2FloatOpenHashMap loadSparseFloatRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Int2FloatOpenHashMap row = new Int2FloatOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readFloat());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 11: loadDenseIntPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
private static void loadDenseIntPartition(DenseIntModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  int[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readInt();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 12: loadDenseIntRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
public static int[] loadDenseIntRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId)
    throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = (int) (partMeta.getEndCol() - partMeta.getStartCol());
  int[] row = new int[num];
  for (int i = 0; i < num; i++) {
    row[i] = input.readInt();
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 14, Source: ModelLoader.java

Example 13: loadSparseIntRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
public static Int2IntOpenHashMap loadSparseIntRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Int2IntOpenHashMap row = new Int2IntOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readInt());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 14: loadSparseDoubleLongKeyRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
public static Long2DoubleOpenHashMap loadSparseDoubleLongKeyRowFromPartition(
    FSDataInputStream input, ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Long2DoubleOpenHashMap row = new Long2DoubleOpenHashMap();
  for (int j = 0; j < num; j++) {
    row.put(input.readLong(), input.readDouble());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 15: deserialize

import org.apache.hadoop.fs.FSDataInputStream; // import the package/class the method depends on
/**
 * Read data splits from an input stream
 * 
 * @param inputStream input stream
 * @throws IOException
 */
public void deserialize(FSDataInputStream inputStream) throws IOException, ClassNotFoundException {
  splitNum = inputStream.readInt();
  int size = inputStream.readInt();

  for (int i = 0; i < size; i++) {
    int index = inputStream.readInt();
    SplitClassification split = new SplitClassification();
    split.deserialize(inputStream);
    splitClassifications.put(index, split);
  }
  inputStream.close();
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: DataSpliter.java
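deserialize() above implies the matching write-side layout: the split count, the number of map entries, then each entry's index followed by the split's own serialized form. The sketch below is inferred from the reader, not Angel's actual DataSpliter code; SplitClassification.serialize(FSDataOutputStream) is an assumption mirroring the deserialize call.

import org.apache.hadoop.fs.FSDataOutputStream;
import java.io.IOException;
import java.util.Map;

// Assumed write-side counterpart of deserialize() above
public void serialize(FSDataOutputStream out) throws IOException {
  out.writeInt(splitNum);                       // number of splits
  out.writeInt(splitClassifications.size());    // number of map entries
  for (Map.Entry<Integer, SplitClassification> e
      : splitClassifications.entrySet()) {
    out.writeInt(e.getKey());                   // index
    e.getValue().serialize(out);                // assumed matching method
  }
}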


Note: The org.apache.hadoop.fs.FSDataInputStream.readInt examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective authors, and copyright remains with the original authors; consult each project's license before distributing or using the code. Do not reproduce without permission.