

Java FSDataInputStream.readInt Method Code Examples

This article collects typical usage examples of the org.apache.hadoop.fs.FSDataInputStream.readInt method in Java. If you are asking what FSDataInputStream.readInt does, how to call it, or what it looks like in real code, the curated examples below should help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.fs.FSDataInputStream.


The sections below present 15 code examples of FSDataInputStream.readInt, ordered by popularity.
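
Before the examples, a quick orientation: FSDataInputStream extends java.io.DataInputStream, so readInt() reads four bytes in big-endian order, exactly as written by the matching DataOutput.writeInt. The minimal round-trip sketch below is illustrative only (the local file system and the temp path are arbitrary choices, not part of any example project):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadIntRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf); // local FS for the demo; any Hadoop FileSystem works
    Path path = new Path("/tmp/readint-demo.bin"); // illustrative path

    // write a 4-byte big-endian int with the matching DataOutput method
    try (FSDataOutputStream out = fs.create(path, true)) {
      out.writeInt(42);
    }

    // read it back with FSDataInputStream.readInt
    try (FSDataInputStream in = fs.open(path)) {
      System.out.println(in.readInt()); // prints 42
    }
  }
}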

Example 1: getModelFormat

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
/**
 * Read model row type
 *
 * @param modelDir model save directory
 * @return row type 0:sparse double, 1:dense double, 2:sparse int, 3:dense int, 4:dense float,
 * 5:sparse float, 7:sparse long key double
 */
public static int getModelFormat(String modelDir) throws IOException {
  Configuration conf = new Configuration();
  Path metaFilePath = new Path(new Path(modelDir), "meta");

  FileSystem fs = metaFilePath.getFileSystem(conf);
  if (!fs.exists(metaFilePath)) {
    throw new IOException("matrix meta file does not exist");
  }

  FSDataInputStream input = fs.open(metaFilePath);

  try {
    input.readInt(); // skip the first int field of the meta header
    input.readUTF(); // skip the UTF-encoded string field
    return input.readInt(); // the next int is the row type
  } finally {
    input.close();
  }
}
 
Developer: Tencent, Project: angel, Lines: 27, Source: ModelLoader.java
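
Since the javadoc's code-to-type mapping is easy to mislay, here is a hypothetical helper (not part of ModelLoader) that names the value returned by getModelFormat:

// Hypothetical convenience mapping, mirroring the javadoc table above.
public static String rowTypeName(int rowType) {
  switch (rowType) {
    case 0: return "sparse double";
    case 1: return "dense double";
    case 2: return "sparse int";
    case 3: return "dense int";
    case 4: return "dense float";
    case 5: return "sparse float";
    case 7: return "sparse long key double";
    default: return "unknown row type: " + rowType;
  }
}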

Example 2: loadSparseDoublePartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadSparseDoublePartition(SparseDoubleModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt(); // number of rows stored in this partition
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2DoubleOpenHashMap row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt(); // non-zeros for this row within the partition's column range
    // extrapolate to a full-row non-zero estimate so the hash map can be pre-sized
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readDouble());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 18, Source: ModelLoader.java
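
Examples 2 through 5 all consume the same partition layout, differing only in key and value types: a row count, then for each row its id, its non-zero count, and that many key/value pairs. As a hedged illustration of that layout (this writer is not the actual Angel save path; it only mirrors the reads above), the bytes could be produced like so:

import java.io.IOException;
import it.unimi.dsi.fastutil.ints.Int2DoubleMap;
import it.unimi.dsi.fastutil.ints.Int2DoubleOpenHashMap;
import org.apache.hadoop.fs.FSDataOutputStream;

// Illustrative writer for the layout read by loadSparseDoublePartition:
// rowNum, then per row: rowId, nnz, and nnz (int column, double value) pairs.
static void writeSparseDoublePartition(FSDataOutputStream output,
    int[] rowIds, Int2DoubleOpenHashMap[] rows) throws IOException {
  output.writeInt(rowIds.length); // rowNum
  for (int i = 0; i < rowIds.length; i++) {
    output.writeInt(rowIds[i]);      // rowId
    output.writeInt(rows[i].size()); // nnz
    for (Int2DoubleMap.Entry e : rows[i].int2DoubleEntrySet()) {
      output.writeInt(e.getIntKey());
      output.writeDouble(e.getDoubleValue());
    }
  }
}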

Example 3: loadSparseFloatPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadSparseFloatPartition(SparseFloatModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2FloatOpenHashMap row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readFloat());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 18, Source: ModelLoader.java

Example 4: loadSparseIntPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadSparseIntPartition(SparseIntModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Int2IntOpenHashMap row = null;

  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readInt(), input.readInt());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: ModelLoader.java

Example 5: loadSparseDoubleLongKeyPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadSparseDoubleLongKeyPartition(SparseDoubleLongKeyModel model,
    FSDataInputStream input, ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int rowId = 0;
  int nnz = 0;
  int totalNNZ = 0;
  Long2DoubleOpenHashMap row = null;

  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    nnz = input.readInt();
    totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int j = 0; j < nnz; j++) {
      row.put(input.readLong(), input.readDouble());
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: ModelLoader.java

Example 6: deserializeSpatialPartitioner

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private SpatialPartitioner deserializeSpatialPartitioner(
        String spatialPartitionerFilePath) {
    String hdfsPath = getDirectoryId() + spatialPartitionerFilePath;

    FileSystem hdfs = KiteInstance.hdfs();
    try (FSDataInputStream block = hdfs.open(new Path(hdfsPath),
            ConstantsAndDefaults.BUFFER_SIZE_BYTES)) {

        int len = block.readInt(); // length prefix of the serialized partitioner
        byte[] serializedData = new byte[len];

        // readFully loops until all len bytes arrive; a single read() may
        // return fewer bytes and silently truncate the payload
        block.readFully(serializedData);

        return SpatialPartitioner.deserialize(serializedData);
    } catch (IOException e) {
        String errMsg = "Unable to read HDFS path " + hdfsPath;
        errMsg += System.lineSeparator();
        errMsg += "Error: " + e.getMessage();
        KiteInstance.logError(errMsg);
        System.err.println(errMsg);
        return null;
    }
}
 
Developer: amrmagdy4, Project: kite, Lines: 27, Source: DiskSpatialIndexSegment.java
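
The read path above implies a simple length-prefixed layout: a 4-byte length written by writeInt, followed by the serialized bytes. A minimal sketch of the matching write side, assuming the serialized byte[] is already in hand (this helper is not part of the Kite codebase):

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper: persist a length-prefixed byte array so that
// readInt()/readFully() on the read side can recover it.
private static void writeLengthPrefixed(FileSystem hdfs, Path path,
        byte[] serializedData) throws IOException {
    try (FSDataOutputStream out = hdfs.create(path, true)) {
        out.writeInt(serializedData.length); // length prefix, read back by readInt
        out.write(serializedData);           // payload, read back by readFully
    }
}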

Example 7: loadDenseDoublePartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadDenseDoublePartition(DenseDoubleModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  double[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readDouble();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 8: loadSparseDoubleRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
public static Int2DoubleOpenHashMap loadSparseDoubleRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId); // verify the seek landed on the expected row
  int num = input.readInt();
  Int2DoubleOpenHashMap row = new Int2DoubleOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readDouble());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 9: loadDenseFloatPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadDenseFloatPartition(DenseFloatModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  float[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readFloat();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 10: loadSparseFloatRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
public static Int2FloatOpenHashMap loadSparseFloatRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Int2FloatOpenHashMap row = new Int2FloatOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readFloat());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 11: loadDenseIntPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
private static void loadDenseIntPartition(DenseIntModel model, FSDataInputStream input,
    ModelPartitionMeta partMeta) throws IOException {
  int rowNum = input.readInt();
  int startCol = (int) partMeta.getStartCol();
  int endCol = (int) partMeta.getEndCol();
  int rowId = 0;
  int[] row = null;
  for (int i = 0; i < rowNum; i++) {
    rowId = input.readInt();
    row = model.getRow(rowId);
    for (int j = startCol; j < endCol; j++) {
      row[j] = input.readInt();
    }
  }
}
 
Developer: Tencent, Project: angel, Lines: 16, Source: ModelLoader.java

Example 12: loadDenseIntRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
public static int[] loadDenseIntRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId)
    throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = (int) (partMeta.getEndCol() - partMeta.getStartCol());
  int[] row = new int[num];
  for (int i = 0; i < num; i++) {
    row[i] = input.readInt();
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 14, Source: ModelLoader.java

Example 13: loadSparseIntRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
public static Int2IntOpenHashMap loadSparseIntRowFromPartition(FSDataInputStream input,
    ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Int2IntOpenHashMap row = new Int2IntOpenHashMap();
  for (int i = 0; i < num; i++) {
    row.put(input.readInt(), input.readInt());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 14: loadSparseDoubleLongKeyRowFromPartition

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
public static Long2DoubleOpenHashMap loadSparseDoubleLongKeyRowFromPartition(
    FSDataInputStream input, ModelPartitionMeta partMeta, int rowId) throws IOException {
  RowOffset rowOffset = partMeta.getRowMetas().get(rowId);
  input.seek(rowOffset.getOffset());
  Preconditions.checkState(input.readInt() == rowId);
  int num = input.readInt();
  Long2DoubleOpenHashMap row = new Long2DoubleOpenHashMap();
  for (int j = 0; j < num; j++) {
    row.put(input.readLong(), input.readDouble());
  }
  return row;
}
 
Developer: Tencent, Project: angel, Lines: 13, Source: ModelLoader.java

Example 15: deserialize

import org.apache.hadoop.fs.FSDataInputStream; // import the class that provides the method
/**
 * Read data splits from an input stream
 * 
 * @param inputStream input stream
 * @throws IOException
 */
public void deserialize(FSDataInputStream inputStream) throws IOException, ClassNotFoundException {
  splitNum = inputStream.readInt();
  int size = inputStream.readInt();

  for (int i = 0; i < size; i++) {
    int index = inputStream.readInt();
    SplitClassification split = new SplitClassification();
    split.deserialize(inputStream);
    splitClassifications.put(index, split);
  }
  inputStream.close(); // note: this method closes the caller's stream when done
}
 
Developer: Tencent, Project: angel, Lines: 19, Source: DataSpliter.java
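
For symmetry, here is a hedged sketch of the write side this reader implies, with the field order mirrored from the reads above (SplitClassification.serialize taking an FSDataOutputStream is an assumption inferred from the deserialize call, not confirmed Angel API):

import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.fs.FSDataOutputStream;

// Assumed counterpart of deserialize: emit splitNum, the entry count, then
// each (index, SplitClassification) pair in the order the reader expects.
public void serialize(FSDataOutputStream outputStream) throws IOException {
  outputStream.writeInt(splitNum);
  outputStream.writeInt(splitClassifications.size());
  for (Map.Entry<Integer, SplitClassification> entry : splitClassifications.entrySet()) {
    outputStream.writeInt(entry.getKey());
    entry.getValue().serialize(outputStream); // assumed symmetric API
  }
  outputStream.close();
}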


Note: The org.apache.hadoop.fs.FSDataInputStream.readInt method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from community open-source projects; copyright of the source code belongs to the original authors. Refer to each project's license before redistributing or using the code; do not republish without permission.