This article collects typical usage examples of the Java class org.apache.hadoop.hive.metastore.api.EnvironmentContext. If you are wondering what the EnvironmentContext class does, how to use it, or what real-world usages look like, the curated examples below should help.
The EnvironmentContext class belongs to the org.apache.hadoop.hive.metastore.api package. A total of 15 code examples of the EnvironmentContext class are shown below, sorted by popularity by default.
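Before the examples, here is a minimal sketch of how an EnvironmentContext is typically built. The class is Thrift-generated and carries nothing but a string-to-string properties map; the property key used below is a common convention rather than a requirement, and the class name EnvironmentContextSketch is purely illustrative.
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

public class EnvironmentContextSketch {
    public static void main(String[] args) {
        // The context is just a property bag that the metastore inspects for hints.
        EnvironmentContext context = new EnvironmentContext();
        // "DO_NOT_UPDATE_STATS" is a commonly used hint key (an assumption here, not required by the API).
        context.putToProperties("DO_NOT_UPDATE_STATS", "true");
        Map<String, String> properties = context.getProperties();
        System.out.println(properties); // prints {DO_NOT_UPDATE_STATS=true}
    }
}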
Example 1: alter_partition_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public void alter_partition_with_environment_context(
final String dbName,
final String tblName,
final Partition newPart,
@Nullable final EnvironmentContext ec
) throws TException {
final String databaseName = normalizeIdentifier(dbName);
final String tableName = normalizeIdentifier(tblName);
requestWrapper("alter_partition_with_environment_context", new Object[]{databaseName, tableName, ec},
() -> {
addPartitionsCore(databaseName, tableName, ImmutableList.of(newPart), false); // use the normalized database name computed above
return null;
});
}
Example 2: alter_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public void alter_table_with_environment_context(
final String dbname,
final String tblName,
final Table newTbl,
@Nullable final EnvironmentContext environmentContext
) throws TException {
requestWrapper("alter_table_with_environment_context",
new Object[]{dbname, tblName, newTbl, environmentContext}, () -> {
final String databaseName = normalizeIdentifier(dbname);
final String tableName = normalizeIdentifier(tblName);
final QualifiedName oldName = QualifiedName.ofTable(catalogName, databaseName, tableName);
final QualifiedName newName = QualifiedName
.ofTable(catalogName, newTbl.getDbName(), newTbl.getTableName());
final TableDto dto = hiveConverters.hiveToMetacatTable(newName, newTbl);
if (!oldName.equals(newName)) {
v1.renameTable(catalogName, oldName.getDatabaseName(), oldName.getTableName(),
newName.getTableName());
}
v1.updateTable(catalogName, dbname, newName.getTableName(), dto);
return null;
});
}
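As a usage note for the rename branch above: a caller renames a table simply by sending a Table whose name differs from the addressed one. The sketch below assumes any ThriftHiveMetastore.Iface implementation (such as the handler above); the database and table names are made up.
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.TException;

class RenameTableSketch {
    static void rename(ThriftHiveMetastore.Iface client) throws TException {
        final Table renamed = client.get_table("reporting", "daily_agg"); // hypothetical names
        renamed.setTableName("daily_agg_v2"); // a differing name triggers the renameTable branch
        client.alter_table_with_environment_context("reporting", "daily_agg", renamed, new EnvironmentContext());
    }
}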
Example 3: append_partition_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public Partition append_partition_with_environment_context(
final String dbName,
final String tblName,
final List<String> partVals,
@Nullable final EnvironmentContext environmentContext
) throws TException {
return requestWrapper("append_partition_by_name_with_environment_context",
new Object[]{dbName, tblName, partVals}, () -> {
final TableDto tableDto = getTableDto(dbName, tblName);
final String partName = hiveConverters.getNameFromPartVals(tableDto, partVals);
appendPartitionsCore(dbName, tblName, partName);
return hiveConverters.metacatToHivePartition(getPartitionDtoByName(tableDto, partName), tableDto);
});
}
Example 4: create_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public void create_table_with_environment_context(
final Table tbl,
@Nullable final EnvironmentContext environmentContext
) throws TException {
requestWrapper("create_table_with_environment_context", new Object[]{tbl, environmentContext}, () -> {
final String dbname = normalizeIdentifier(tbl.getDbName());
final String tblName = normalizeIdentifier(tbl.getTableName());
final QualifiedName name = QualifiedName.ofTable(catalogName, dbname, tblName);
final TableDto dto = hiveConverters.hiveToMetacatTable(name, tbl);
v1.createTable(catalogName, dbname, tblName, dto);
return null;
});
}
Example 5: drop_partition_by_name_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public boolean drop_partition_by_name_with_environment_context(
final String dbName, final String tblName,
final String partName,
final boolean deleteData,
@Nullable final EnvironmentContext environmentContext
) throws TException {
return requestWrapper("drop_partition_by_name_with_environment_context",
new Object[]{dbName, tblName, partName, deleteData, environmentContext}, () -> {
final String databaseName = normalizeIdentifier(dbName);
final String tableName = normalizeIdentifier(tblName);
if (deleteData) {
log.warn("Ignoring command to delete data for {}/{}/{}/{}",
catalogName, databaseName, tableName, partName);
}
partV1.deletePartitions(catalogName, databaseName, tableName, ImmutableList.of(partName));
return true;
});
}
Example 6: drop_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public void drop_table_with_environment_context(final String dbname, final String name,
final boolean deleteData,
@Nullable final EnvironmentContext ec) throws TException {
requestWrapper("drop_table_with_environment_context", new Object[]{dbname, name, deleteData, ec}, () -> {
final String databaseName = normalizeIdentifier(dbname);
final String tableName = normalizeIdentifier(name);
if (deleteData) {
log.warn("Ignoring command to delete data for {}/{}/{}", catalogName, databaseName, tableName);
}
return v1.deleteTable(catalogName, databaseName, tableName);
});
}
Example 7: get_fields_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
/**
* {@inheritDoc}
*/
@Override
public List<FieldSchema> get_fields_with_environment_context(
final String dbName,
final String tableName,
@Nullable final EnvironmentContext environmentContext
) throws TException {
return requestWrapper("get_fields_with_environment_context",
new Object[]{dbName, tableName, environmentContext}, () -> {
final Table table = get_table(dbName, tableName);
if (table == null || table.getSd() == null || table.getSd().getCols() == null) {
throw new MetaException("Unable to get fields for " + dbName + "." + tableName);
}
return table.getSd().getCols();
});
}
Example 8: dropTable
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
public void dropTable(String dbname, String name, boolean deleteData,
boolean ignoreUnknownTab, EnvironmentContext envContext) throws MetaException, TException,
NoSuchObjectException, UnsupportedOperationException {
Table tbl;
try {
tbl = getTable(dbname, name);
} catch (NoSuchObjectException e) {
if (!ignoreUnknownTab) {
throw e;
}
return;
}
if (isIndexTable(tbl)) {
throw new UnsupportedOperationException("Cannot drop index tables");
}
client.drop_table_with_environment_context(dbname, name, deleteData, envContext);
}
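A hedged usage sketch for the wrapper above: the EnvironmentContext is where drop-time hints travel, and Hive conventionally reads an "ifPurge" property to decide whether deleted data should skip the trash. MetastoreFacade below is a hypothetical stand-in for whatever class declares the dropTable method shown.
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.thrift.TException;

class DropWithPurgeSketch {
    // MetastoreFacade is hypothetical; it represents the class hosting dropTable(...) above.
    static void dropPurged(MetastoreFacade metastore) throws TException {
        EnvironmentContext envContext = new EnvironmentContext();
        envContext.putToProperties("ifPurge", "TRUE"); // conventional key; treat as an assumption
        metastore.dropTable("reporting", "tmp_staging", true /* deleteData */, true /* ignoreUnknownTab */, envContext);
    }
}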
Example 9: create_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void create_table_with_environment_context()
throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
Table table = new Table();
table.setDbName(DB_P);
Table inboundTable = new Table();
inboundTable.setDbName("inbound");
when(primaryMapping.transformInboundTable(table)).thenReturn(inboundTable);
handler.create_table_with_environment_context(table, environmentContext);
verify(primaryMapping).checkWritePermissions(DB_P);
verify(primaryClient).create_table_with_environment_context(inboundTable, environmentContext);
}
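Examples 9 through 15 all rely on a Mockito fixture (handler, primaryMapping, primaryClient, DB_P) that is not shown in the snippets. The sketch below is a reconstruction under assumptions: DatabaseMapping is declared locally as a stand-in with only a few of the methods the examples call, the value of DB_P is invented, and the handler construction is omitted because its type never appears above.
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class HandlerFixtureSketch {

    static final String DB_P = "primary_db"; // assumed value of the DB_P constant

    // Local stand-in for the mapping abstraction the tests mock; only calls used in the examples are declared.
    interface DatabaseMapping {
        String transformInboundDatabaseName(String dbName);
        Table transformInboundTable(Table table);
        void checkWritePermissions(String dbName);
    }

    @Mock
    private DatabaseMapping primaryMapping;
    @Mock
    private ThriftHiveMetastore.Iface primaryClient;

    @Before
    public void setUp() {
        // Shared stubbing used by most of the examples: inbound database names are rewritten before delegation.
        when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
    }

    @Test
    public void mappingRewritesDatabaseName() {
        assertThat(primaryMapping.transformInboundDatabaseName(DB_P), is("inbound"));
    }
}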
Example 10: drop_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void drop_table_with_environment_context() throws NoSuchObjectException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
handler.drop_table_with_environment_context(DB_P, "table", false, environmentContext);
verify(primaryMapping).checkWritePermissions(DB_P);
verify(primaryClient).drop_table_with_environment_context("inbound", "table", false, environmentContext);
}
Example 11: alter_table_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void alter_table_with_environment_context() throws InvalidOperationException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
Table table = new Table();
table.setDbName(DB_P);
Table inbound = new Table();
when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
when(primaryMapping.transformInboundTable(table)).thenReturn(inbound);
handler.alter_table_with_environment_context(DB_P, "table", table, environmentContext);
verify(primaryMapping, times(2)).checkWritePermissions(DB_P);
verify(primaryClient).alter_table_with_environment_context("inbound", "table", inbound, environmentContext);
}
Example 12: add_partition_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void add_partition_with_environment_context()
throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
Partition newPartition = new Partition();
newPartition.setDbName(DB_P);
Partition inbound = new Partition();
Partition outbound = new Partition();
when(primaryMapping.transformInboundPartition(newPartition)).thenReturn(inbound);
when(primaryClient.add_partition_with_environment_context(inbound, environmentContext)).thenReturn(inbound);
when(primaryMapping.transformOutboundPartition(inbound)).thenReturn(outbound);
Partition result = handler.add_partition_with_environment_context(newPartition, environmentContext);
assertThat(result, is(outbound));
verify(primaryMapping).checkWritePermissions(DB_P);
}
Example 13: append_partition_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void append_partition_with_environment_context()
throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
Partition inbound = new Partition();
Partition outbound = new Partition();
List<String> partVals = Lists.newArrayList();
when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
when(primaryClient.append_partition_with_environment_context("inbound", "table1", partVals, environmentContext))
.thenReturn(inbound);
when(primaryMapping.transformOutboundPartition(inbound)).thenReturn(outbound);
Partition result = handler.append_partition_with_environment_context(DB_P, "table1", partVals, environmentContext);
assertThat(result, is(outbound));
verify(primaryMapping).checkWritePermissions(DB_P);
}
Example 14: append_partition_by_name_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void append_partition_by_name_with_environment_context()
throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
Partition inbound = new Partition();
Partition outbound = new Partition();
when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
when(primaryClient.append_partition_by_name_with_environment_context("inbound", "table1", "partName",
environmentContext)).thenReturn(inbound);
when(primaryMapping.transformOutboundPartition(inbound)).thenReturn(outbound);
Partition result = handler.append_partition_by_name_with_environment_context(DB_P, "table1", "partName",
environmentContext);
assertThat(result, is(outbound));
verify(primaryMapping).checkWritePermissions(DB_P);
}
Example 15: drop_partition_with_environment_context
import org.apache.hadoop.hive.metastore.api.EnvironmentContext; // import the required package/class
@Test
public void drop_partition_with_environment_context() throws NoSuchObjectException, MetaException, TException {
EnvironmentContext environmentContext = new EnvironmentContext();
List<String> partVals = Lists.newArrayList();
when(primaryMapping.transformInboundDatabaseName(DB_P)).thenReturn("inbound");
when(
primaryClient.drop_partition_with_environment_context("inbound", "table1", partVals, false, environmentContext))
.thenReturn(true);
boolean result = handler.drop_partition_with_environment_context(DB_P, "table1", partVals, false,
environmentContext);
assertThat(result, is(true));
verify(primaryMapping).checkWritePermissions(DB_P);
}