This article collects typical usage examples of the Java class com.esri.arcgis.geodatabase.FeatureClass. If you are unsure how the FeatureClass class is used in practice, or are looking for concrete FeatureClass examples, the selected code samples below should help.
The FeatureClass class belongs to the com.esri.arcgis.geodatabase package. Eleven code examples of the FeatureClass class are shown below, sorted by popularity by default.
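Two recurring patterns for obtaining a FeatureClass instance appear in the examples below: wrapping an IFeatureClass proxy decoded from a geoprocessing parameter (Examples 4, 6, 7, 11), and wrapping the raw data source object returned by a map server (Examples 3, 8). The following minimal sketch illustrates the first pattern in isolation; the helper name readFeatureClassName and its standalone packaging are illustrative assumptions, while the constructor and Cleaner.release calls mirror the examples.

import java.io.IOException;
import com.esri.arcgis.geodatabase.FeatureClass;
import com.esri.arcgis.geodatabase.IFeatureClass;
import com.esri.arcgis.interop.Cleaner;

// Hypothetical helper, not taken from any of the examples: wraps an already-decoded
// IFeatureClass in the COM-backed FeatureClass wrapper, reads one property,
// and releases the underlying COM reference afterwards.
static String readFeatureClassName(final IFeatureClass decoded) throws IOException {
    final FeatureClass featureClass = new FeatureClass(decoded);
    try {
        return featureClass.getName();
    } finally {
        Cleaner.release(featureClass); // the examples below release every COM-backed object this way
    }
}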
Example 1: getWkid
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
protected int getWkid(final FeatureClass featureClass) throws IOException
{
    final int wkid;
    final ISpatialReference spatialReference = featureClass.getSpatialReference();
    try
    {
        if (spatialReference instanceof ISpatialReferenceAuthority)
        {
            final ISpatialReferenceAuthority spatialReferenceAuthority = (ISpatialReferenceAuthority) spatialReference;
            final int code = spatialReferenceAuthority.getCode();
            // Fall back to WGS 1984 when no authority code is set.
            wkid = code == 0 ? esriSRGeoCSType.esriSRGeoCS_WGS1984 : code;
        }
        else
        {
            wkid = esriSRGeoCSType.esriSRGeoCS_WGS1984;
        }
    }
    finally
    {
        Cleaner.release(spatialReference);
    }
    return wkid;
}
Example 2: doSchema
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private void doSchema(
        final FSDataOutputStream fsDataOutputStream,
        final String namespace,
        final FeatureClass featureClass,
        final IGPMessages messages) throws IOException
{
    final int wkid = getWkid(featureClass);
    final JsonFactory jsonFactory = new JsonFactory();
    final JsonGenerator g = jsonFactory.createJsonGenerator(fsDataOutputStream);
    try
    {
        // Emit an Avro-style record schema describing the feature class.
        g.writeStartObject();
        g.writeStringField("type", "record");
        g.writeStringField("namespace", namespace);
        g.writeStringField("name", featureClass.getName());
        g.writeArrayFieldStart("fields");
        writeShape(g, namespace, featureClass.getShapeType(), wkid, messages);
        writeFields(g, featureClass, messages);
        g.writeEndArray();
        g.writeEndObject();
    }
    finally
    {
        g.close();
    }
}
Example 3: AccessGdbForAnalysisImpl
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
public AccessGdbForAnalysisImpl(DBInspector soe) throws AutomationException, IOException {
    LOGGER.debug("Creating AccessGdbForAnalysisImpl.");
    this.soe = soe;
    // Workspace creation
    IMapServer3 ms = (IMapServer3) soe.getMapServerDataAccess();
    String mapName = ms.getDefaultMapName();
    IMapServerDataAccess mapServerDataAccess = soe.getMapServerDataAccess();
    Object dataSource = mapServerDataAccess.getDataSource(mapName, 0);
    FeatureClass fc = new FeatureClass(dataSource);
    Workspace workspace = new Workspace(fc.getWorkspace());
    this.workspace = new WorkspaceWrapper();
    if (fc.getWorkspace() instanceof SqlWorkspace) {
        this.workspace.setSqlWorkspace((SqlWorkspace) fc.getWorkspace());
        this.workspace.setWorkspace(workspace);
    }
    else {
        this.workspace.setWorkspace(workspace);
    }
}
Example 4: doExport
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private void doExport(
        final IGPValue hadoopPropValue,
        final IGPValue featureClassValue,
        final IGPValue outputValue,
        final IGPMessages messages) throws Exception
{
    // Decode the feature layer parameter into a FeatureClass wrapper.
    final IFeatureClass[] featureClasses = new IFeatureClass[]{new IFeatureClassProxy()};
    gpUtilities.decodeFeatureLayer(featureClassValue, featureClasses, null);
    final FeatureClass featureClass = new FeatureClass(featureClasses[0]);
    try
    {
        final Configuration configuration = createConfiguration(hadoopPropValue.getAsText());
        final String namespace = configuration.get("schema.namespace", "com.esri");
        final Path path = new Path(outputValue.getAsText());
        final FileSystem fileSystem = path.getFileSystem(configuration);
        try
        {
            // Overwrite the output file and write the schema into it.
            final FSDataOutputStream fsDataOutputStream = fileSystem.create(path, true);
            try
            {
                doSchema(fsDataOutputStream, namespace, featureClass, messages);
            }
            finally
            {
                fsDataOutputStream.close();
            }
        }
        finally
        {
            fileSystem.close();
        }
    }
    finally
    {
        Cleaner.release(featureClass);
    }
}
Example 5: writeFields
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private void writeFields(
        final JsonGenerator g,
        final FeatureClass featureClass,
        final IGPMessages messages) throws IOException
{
    final IFields fields = featureClass.getFields();
    try
    {
        final int count = fields.getFieldCount();
        for (int c = 0; c < count; c++)
        {
            final IField field = fields.getField(c);
            messages.addMessage(String.format("%s %d %d",
                    field.getName(), field.getType(), field.getLength()));
            // Map esri field types to schema primitive types; unsupported types are skipped.
            switch (field.getType())
            {
                case esriFieldType.esriFieldTypeString:
                    writeField(g, field, "string");
                    break;
                case esriFieldType.esriFieldTypeDouble:
                    writeField(g, field, "double");
                    break;
                case esriFieldType.esriFieldTypeSingle:
                    writeField(g, field, "float");
                    break;
                case esriFieldType.esriFieldTypeInteger:
                    writeField(g, field, "int");
                    break;
                case esriFieldType.esriFieldTypeSmallInteger:
                    writeField(g, field, "int");
                    break;
            }
        }
    }
    finally
    {
        Cleaner.release(fields);
    }
}
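The writeField helper called in Examples 2 and 5 is not included in this listing. Assuming each attribute becomes a nullable Avro-style record field (an assumption, not the original implementation), it could look roughly like this; the field name in the comment is made up.

import java.io.IOException;
import org.codehaus.jackson.JsonGenerator; // or com.fasterxml.jackson.core.JsonGenerator, depending on the Jackson version in use
import com.esri.arcgis.geodatabase.IField;

// Hypothetical sketch of writeField: emits one field entry such as
// {"name":"POPULATION","type":["null","double"]} into the schema's "fields" array.
private void writeField(final JsonGenerator g, final IField field, final String avroType) throws IOException {
    g.writeStartObject();
    g.writeStringField("name", field.getName());
    g.writeArrayFieldStart("type");
    g.writeString("null");     // allow missing attribute values
    g.writeString(avroType);
    g.writeEndArray();
    g.writeEndObject();
}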
Example 6: doExport
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private int doExport(
        final IArray parameters,
        final IGPMessages messages
) throws Exception
{
    final IGPValue hadoopConfValue = gpUtilities.unpackGPValue(parameters.getElement(0));
    final IGPValue featureClassValue = gpUtilities.unpackGPValue(parameters.getElement(2));
    int count = 0;
    final IFeatureClass[] featureClasses = new IFeatureClass[]{new IFeatureClassProxy()};
    gpUtilities.decodeFeatureLayer(featureClassValue, featureClasses, null);
    final FeatureClass featureClass = new FeatureClass(featureClasses[0]);
    try
    {
        final Configuration configuration = HBaseConfiguration.create(createConfiguration(hadoopConfValue.getAsText()));
        // The HBase table is named after the feature class.
        createIfDoesNotExist(configuration, featureClass.getName());
        final HTableInterface table = new HTable(configuration, featureClass.getName());
        try
        {
            table.setAutoFlush(configuration.getBoolean("exportToHBaseTool.autoFlush", false));
            messages.addMessage("autoFlush is " + (table.isAutoFlush() ? "true" : "false"));
            table.setWriteBufferSize(configuration.getInt("exportToHBaseTool.writeBufferSize", 2 * 1024 * 1024));
            messages.addMessage(String.format("writeBufferSize = %d", table.getWriteBufferSize()));
            count = doExport(configuration, messages, featureClass, table);
            if (!table.isAutoFlush())
            {
                table.flushCommits();
            }
        }
        finally
        {
            table.close();
        }
    }
    finally
    {
        Cleaner.release(featureClass);
    }
    return count;
}
Example 7: doExport
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private int doExport(
        final IGPValue hadoopPropValue,
        final IGPValue featureClassValue,
        final IGPValue schemaValue,
        final IGPValue outputValue) throws Exception
{
    int count = 0;
    final IFeatureClass[] featureClasses = new IFeatureClass[]{new IFeatureClassProxy()};
    gpUtilities.decodeFeatureLayer(featureClassValue, featureClasses, null);
    final FeatureClass featureClass = new FeatureClass(featureClasses[0]);
    try
    {
        final int wkid = getWkid(featureClass);
        final Configuration configuration = createConfiguration(hadoopPropValue.getAsText());
        final Schema schema = parseSchema(schemaValue.getAsText(), configuration);
        final Path path = new Path(outputValue.getAsText());
        // Remove any previous output before writing the Parquet file.
        path.getFileSystem(configuration).delete(path, true);
        final AvroParquetWriter<GenericRecord> writer = new AvroParquetWriter<GenericRecord>(path, schema);
        try
        {
            final IFeatureCursor cursor = featureClass.search(null, false);
            try
            {
                final IFields fields = cursor.getFields();
                IFeature feature = cursor.nextFeature();
                try
                {
                    while (feature != null)
                    {
                        final IGeometry shape = feature.getShape();
                        if (shape instanceof Point)
                        {
                            writer.write(buildRecord(schema, wkid, fields, feature, (Point) shape));
                            count++;
                        }
                        else if (shape instanceof Polyline)
                        {
                            writer.write(buildRecord(schema, wkid, fields, feature, (Polyline) shape));
                            count++;
                        }
                        else if (shape instanceof Polygon)
                        {
                            writer.write(buildRecord(schema, wkid, fields, feature, (Polygon) shape));
                            count++;
                        }
                        feature = cursor.nextFeature();
                    }
                }
                finally
                {
                    Cleaner.release(fields);
                }
            }
            finally
            {
                Cleaner.release(cursor);
            }
        }
        finally
        {
            writer.close();
        }
    }
    finally
    {
        Cleaner.release(featureClass);
    }
    return count;
}
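To sanity-check an export like the one above, the Parquet file can be read back with the matching Avro reader. A minimal sketch, assuming the same parquet-avro library that provides AvroParquetWriter (older releases ship it under the parquet.avro package instead of org.apache.parquet.avro); the helper name countRecords is made up.

import java.io.IOException;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;

// Hypothetical verification helper, not part of the original tool: reads the file
// written above and counts its records, which should match the count doExport returned.
private long countRecords(final Path path) throws IOException {
    long count = 0;
    final AvroParquetReader<GenericRecord> reader = new AvroParquetReader<GenericRecord>(path);
    try {
        while (reader.read() != null) {
            count++;
        }
    } finally {
        reader.close();
    }
    return count;
}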
Example 8: AccessGDBImpl
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
/**
 * Creates an AccessObservationGDB object and connects to the DB of the
 * ArcGIS MapServer handed over as a parameter.
 *
 * @throws AutomationException
 * @throws IOException
 */
public AccessGDBImpl(SosSoe sos) throws AutomationException, IOException {
    LOGGER.info("Creating AccessGDBImpl.");
    long start = System.currentTimeMillis();
    this.sos = sos;
    // Workspace creation
    IMapServer3 ms = (IMapServer3) sos.getMapServerDataAccess();
    String mapName = ms.getDefaultMapName();
    LOGGER.info("Using mapName: " + mapName);
    IMapServerDataAccess mapServerDataAccess = sos.getMapServerDataAccess();
    LOGGER.info("Using IMapServerDataAccess: " + mapServerDataAccess);
    Object dataSource = mapServerDataAccess.getDataSource(mapName, 0);
    LOGGER.info("Using dataSource: " + dataSource.getClass());
    FeatureClass fc = new FeatureClass(dataSource);
    resolveDatabaseName(fc);
    Workspace workspace = new Workspace(fc.getWorkspace());
    this.workspaceWrapper = new WorkspaceWrapper();
    // logConnectionProperties(fc.getWorkspace());
    if (fc.getWorkspace() instanceof SqlWorkspace) {
        this.workspaceWrapper.setSqlWorkspace((SqlWorkspace) fc.getWorkspace());
        this.workspaceWrapper.setWorkspace(workspace);
    }
    else {
        // SqlWorkspaceFactory fac = new SqlWorkspaceFactory();
        // SqlWorkspace sqlW = (SqlWorkspace) fac.open(fc.getWorkspace().getConnectionProperties(), fc.getWorkspace().getType());
        // workspace = new Workspace(sqlW);
        // this.workspaceWrapper.setSqlWorkspace(sqlW);
        this.workspaceWrapper.setWorkspace(workspace);
    }
    LOGGER.info("workspace: " + this.workspaceWrapper.toString());
    init("/arcGisSos.properties", sos.getMaximumRecordCount());
    long delta = System.currentTimeMillis() - start;
    LOGGER.info("End of creating AccessGDBImpl. Created in " + delta / 1000 + " seconds.");
}
Example 9: resolveDatabaseName
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private void resolveDatabaseName(FeatureClass fc) throws IOException {
    // Keep everything before the last '.' of the fully qualified feature class name.
    String name = fc.getName();
    int lastIndex = name.lastIndexOf(".");
    this.databaseName = name.substring(0, lastIndex).trim();
    LOGGER.info("databaseName = " + this.databaseName);
}
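As an illustration of the substring logic above (the name below is made up), the method keeps everything before the last dot of the fully qualified feature class name:

// Hypothetical illustration of resolveDatabaseName's substring logic:
String name = "observations.sde.FEATURE_OF_INTEREST";   // fully qualified name, made up for illustration
String databaseName = name.substring(0, name.lastIndexOf(".")).trim();
// databaseName is now "observations.sde".
// Note: a name without any '.' makes lastIndexOf return -1, and
// substring(0, -1) then throws StringIndexOutOfBoundsException.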
Example 10: toShapeWriter
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private ShapeWriterInterface toShapeWriter(
        final Configuration configuration,
        final FeatureClass featureClass,
        final String rowKeyGenerator,
        final IGPMessages messages) throws IOException
{
    final ShapeWriterInterface shapeWriter;
    final String shapeWriterType = configuration.get("exportToHBaseTool.shapeWriterType", "bytes");
    messages.addMessage("shapeWriterType = " + shapeWriterType);
    switch (featureClass.getShapeType())
    {
        case esriShapeType.esriShapePoint:
            if ("geohash".equalsIgnoreCase(rowKeyGenerator))
            {
                shapeWriter = new ShapeWriterNoop();
            }
            else if ("geojson".equalsIgnoreCase(shapeWriterType))
            {
                shapeWriter = new PointWriterGeoJSON();
            }
            else if ("avro".equalsIgnoreCase(shapeWriterType))
            {
                shapeWriter = new PointWriterAvro(getWkid(featureClass));
            }
            else if ("bytes".equalsIgnoreCase(shapeWriterType))
            {
                shapeWriter = new PointWriterBytes();
            }
            else if ("esri".equalsIgnoreCase(shapeWriterType))
            {
                shapeWriter = new ShapeWriterEsri();
            }
            else // noop
            {
                shapeWriter = new ShapeWriterNoop();
            }
            break;
        default:
            if ("esri".equalsIgnoreCase(shapeWriterType))
            {
                shapeWriter = new ShapeWriterEsri();
            }
            else
            {
                // TODO - Polyline and Polygon !
                shapeWriter = new ShapeWriterNoop();
            }
    }
    return shapeWriter;
}
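The writer selection above is driven entirely by Hadoop configuration properties. Here is a minimal sketch of setting those keys programmatically; the key names come from Examples 6 and 10, the values are illustrative, and the surrounding tool wiring is assumed.

import org.apache.hadoop.conf.Configuration;

// Hypothetical configuration snippet: choose the GeoJSON point writer and tune the
// HBase buffering used by the export examples.
Configuration configuration = new Configuration();
configuration.set("exportToHBaseTool.shapeWriterType", "geojson"); // bytes (default), geojson, avro, esri; anything else falls back to noop
configuration.setBoolean("exportToHBaseTool.autoFlush", false);
configuration.setInt("exportToHBaseTool.writeBufferSize", 2 * 1024 * 1024);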
Example 11: doExport
import com.esri.arcgis.geodatabase.FeatureClass; // import the required package/class
private Void doExport(
        final IArray parameters,
        final IGPMessages messages
) throws Exception
{
    final IGPValue hadoopConfValue = gpUtilities.unpackGPValue(parameters.getElement(0));
    final IGPValue featureClassValue = gpUtilities.unpackGPValue(parameters.getElement(2));
    final IFeatureClass[] featureClasses = new IFeatureClass[]{new IFeatureClassProxy()};
    gpUtilities.decodeFeatureLayer(featureClassValue, featureClasses, null);
    final FeatureClass featureClass = new FeatureClass(featureClasses[0]);
    try
    {
        final Configuration configuration = HBaseConfiguration.create(createConfiguration(hadoopConfValue.getAsText()));
        final HBaseAdmin admin = new HBaseAdmin(configuration);
        try
        {
            // Recreate the table from scratch if it already exists.
            if (admin.tableExists(featureClass.getName()))
            {
                admin.disableTable(featureClass.getName());
                admin.deleteTable(featureClass.getName());
            }
            messages.addMessage("Creating HTable '" + featureClass.getName() + "'");
            final int maxVersions = configuration.getInt("createHTableTool.maxVersions", 1);
            final HTableDescriptor tableDescriptor = new HTableDescriptor(featureClass.getName());
            final HColumnDescriptor geomDescriptor = new HColumnDescriptor(Const.GEOM);
            geomDescriptor.setMaxVersions(maxVersions);
            tableDescriptor.addFamily(geomDescriptor);
            final HColumnDescriptor attrDescriptor = new HColumnDescriptor(Const.ATTR);
            attrDescriptor.setMaxVersions(maxVersions);
            tableDescriptor.addFamily(attrDescriptor);
            admin.createTable(tableDescriptor);
        }
        finally
        {
            admin.close();
        }
    }
    finally
    {
        Cleaner.release(featureClass);
    }
    return null;
}