This article collects typical usage examples of the Java method org.apache.hadoop.hbase.HTableDescriptor.hasFamily. If you are wondering what HTableDescriptor.hasFamily does, how to call it, or what it looks like in real code, the curated samples below should help. You can also read more about the enclosing class, org.apache.hadoop.hbase.HTableDescriptor.
The following 7 code examples of HTableDescriptor.hasFamily are shown, sorted by popularity by default.
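Before the examples, here is a minimal, self-contained sketch (not taken from the examples below; the table name "demo_table" and family names "cf1"/"cf2" are placeholders) showing what hasFamily reports: whether a descriptor already contains a column family with the given byte[] name.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

public class HasFamilyDemo {
  public static void main(String[] args) {
    // Build a descriptor for a placeholder table with a single column family "cf1".
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("demo_table"));
    htd.addFamily(new HColumnDescriptor("cf1"));

    // hasFamily only inspects the in-memory descriptor; it does not contact the cluster.
    System.out.println(htd.hasFamily(Bytes.toBytes("cf1"))); // true
    System.out.println(htd.hasFamily(Bytes.toBytes("cf2"))); // false
  }
}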
Example 1: modifyColumn
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
/**
 * Modify a column of a table.
 * @param tableName
 * @param hcd HColumnDescriptor
 * @return Modified HTableDescriptor with the column modified.
 * @throws IOException
 */
public HTableDescriptor modifyColumn(TableName tableName, HColumnDescriptor hcd)
    throws IOException {
  LOG.info("AddModifyColumn. Table = " + tableName + " HCD = " + hcd.toString());
  HTableDescriptor htd = this.services.getTableDescriptors().get(tableName);
  byte[] familyName = hcd.getName();
  if (!htd.hasFamily(familyName)) {
    throw new InvalidFamilyOperationException("Family '" +
        Bytes.toString(familyName) + "' doesn't exist, so it cannot be modified");
  }
  htd.modifyFamily(hcd);
  this.services.getTableDescriptors().add(htd);
  return htd;
}
Example 2: updateTableDescriptor
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
/**
 * Add the column family to the file system.
 */
private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
  // Update the table descriptor
  LOG.info("AddColumn. Table = " + tableName + " HCD = " + cfDescriptor.toString());
  HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
  if (htd.hasFamily(cfDescriptor.getName())) {
    // It is possible to reach this situation: the column family may already have been added
    // to the table descriptor, but a master failover happened before this state completed.
    // Running this function multiple times should therefore not cause problems.
    return;
  }
  htd.addFamily(cfDescriptor);
  env.getMasterServices().getTableDescriptors().add(htd);
}
Example 3: updateTableDescriptor
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
/**
 * Remove the column family from the file system and update the table descriptor.
 */
private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
  // Update the table descriptor
  LOG.info("DeleteColumn. Table = " + tableName + " family = " + getColumnFamilyName());
  HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
  if (!htd.hasFamily(familyName)) {
    // It is possible to reach this situation: the column family may already have been deleted
    // from the table descriptor, but a master failover happened before this state completed.
    // Running this function multiple times should therefore not cause problems.
    return;
  }
  htd.removeFamily(familyName);
  env.getMasterServices().getTableDescriptors().add(htd);
}
Example 4: hasColumnFamily
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
byte[] hasColumnFamily(final HTableDescriptor htd, final byte[] cf)
    throws InvalidFamilyOperationException {
  if (!htd.hasFamily(cf)) {
    throw new InvalidFamilyOperationException("Column family '" +
        Bytes.toString(cf) + "' does not exist");
  }
  return cf;
}
Example 5: restoreTableDescriptor
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
/**
 * Restore the table descriptor back to its pre-add state.
 * @param env MasterProcedureEnv
 * @throws IOException
 */
private void restoreTableDescriptor(final MasterProcedureEnv env) throws IOException {
  HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
  if (htd.hasFamily(cfDescriptor.getName())) {
    // Remove the column family from the file system and update the table descriptor to
    // its before-add-column-family state.
    MasterDDLOperationHelper.deleteColumnFamilyFromFileSystem(env, tableName,
        getRegionInfoList(env), cfDescriptor.getName());
    env.getMasterServices().getTableDescriptors().add(unmodifiedHTableDescriptor);
    // Make sure regions are reopened after the table descriptor is updated.
    reOpenAllRegionsIfTableIsOnline(env);
  }
}
Example 6: init
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
@SuppressWarnings("deprecation")
private Store init(String methodName, Configuration conf, HTableDescriptor htd,
    HColumnDescriptor hcd) throws IOException {
  // Set up a Store
  Path basedir = new Path(DIR + methodName);
  Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName());
  final Path logdir = new Path(basedir, DefaultWALProvider.getWALDirectoryName(methodName));
  FileSystem fs = FileSystem.get(conf);
  fs.delete(logdir, true);
  if (htd.hasFamily(hcd.getName())) {
    htd.modifyFamily(hcd);
  } else {
    htd.addFamily(hcd);
  }
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  final Configuration walConf = new Configuration(conf);
  FSUtils.setRootDir(walConf, basedir);
  final WALFactory wals = new WALFactory(walConf, null, methodName);
  HRegion region = new HRegion(tableDir, wals.getWAL(info.getEncodedNameAsBytes()), fs, conf,
      info, htd, null);
  store = new HStore(region, hcd, conf);
  return store;
}
Example 7: hasFamily
import org.apache.hadoop.hbase.HTableDescriptor; // import the package/class the method depends on
private boolean hasFamily(HColumnDescriptor family, TableName table) throws IOException {
  byte[] familyName = family.getName();
  HTableDescriptor tableDescriptor = admin.getTableDescriptor(table);
  return tableDescriptor.hasFamily(familyName);
}
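Example 7 fetches the descriptor through Admin.getTableDescriptor, so the check reflects the table's current schema on the cluster rather than a local descriptor. As a follow-up, here is a hedged sketch of how such a helper might be used, assuming an initialized org.apache.hadoop.hbase.client.Admin field named admin (as in the example above) and placeholder arguments; Admin.addColumn is the HBase 1.x call, later deprecated in favor of addColumnFamily.

// Hypothetical caller: add the column family only if the table does not already have it.
private void addFamilyIfAbsent(TableName table, HColumnDescriptor family) throws IOException {
  if (!hasFamily(family, table)) {
    admin.addColumn(table, family); // HBase 1.x Admin API
  }
}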