本文整理匯總了Java中org.joda.time.Interval.parse方法的典型用法代碼示例。如果您正苦於以下問題:Java Interval.parse方法的具體用法?Java Interval.parse怎麽用?Java Interval.parse使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.joda.time.Interval
的用法示例。
在下文中一共展示了Interval.parse方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: fromFile
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Reads forecast-errors analyzer parameters from a Java properties file.
 *
 * <p>All properties except "nSamples" are mandatory; a missing mandatory
 * property surfaces as a NullPointerException/NumberFormatException from the
 * corresponding parse call. "nSamples" defaults to -1 when absent.
 *
 * @param file path of the properties file to read
 * @return the parameters parsed from the file
 * @throws IOException if the file cannot be opened or read
 */
public static ForecastErrorsAnalyzerParameters fromFile(Path file) throws FileNotFoundException, IOException {
    Properties properties = new Properties();
    try (InputStream input = new FileInputStream(file.toFile())) {
        properties.load(input);
        return new ForecastErrorsAnalyzerParameters(
                Interval.parse(properties.getProperty("histoInterval")),
                properties.getProperty("feAnalysisId"),
                Double.parseDouble(properties.getProperty("ir")),
                Integer.parseInt(properties.getProperty("flagPQ")),
                Integer.parseInt(properties.getProperty("method")),
                Integer.parseInt(properties.getProperty("nClusters")),
                Double.parseDouble(properties.getProperty("percentileHistorical")),
                Integer.parseInt(properties.getProperty("modalityGaussian")),
                Integer.parseInt(properties.getProperty("outliers")),
                Integer.parseInt(properties.getProperty("conditionalSampling")),
                // Properties.getProperty(key, default) replaces the manual null-check ternary
                Integer.parseInt(properties.getProperty("nSamples", "-1")));
    }
}
示例2: run
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Command-line entry point: for every attribute stored in the historical DB,
 * prints "attributeId;count" (number of samples in the requested interval).
 * Horizon defaults to SN unless the "horizon" option is given.
 */
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Interval queryInterval = Interval.parse(line.getOptionValue("interval"));
    HistoDbHorizon selectedHorizon = line.hasOption("horizon")
            ? HistoDbHorizon.valueOf(line.getOptionValue("horizon"))
            : HistoDbHorizon.SN;
    OfflineConfig offlineConfig = OfflineConfig.load();
    try (HistoDbClient client = offlineConfig.getHistoDbClientFactoryClass().newInstance().create(true)) {
        // LinkedHashSet keeps the attribute order reported by the DB
        Set<HistoDbAttributeId> attributes = new LinkedHashSet<>(client.listAttributes());
        HistoDbStats statistics = client.queryStats(attributes, queryInterval, selectedHorizon, true);
        for (HistoDbAttributeId attribute : attributes) {
            int count = (int) statistics.getValue(HistoDbStatsType.COUNT, attribute, -1);
            context.getOutputStream().println(attribute + ";" + count);
        }
    }
}
示例3: load
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Loads the forecast-errors analysis parameters from the "fea-parameters"
 * module of the default platform configuration.
 *
 * <p>modalityGaussian, outliers and conditionalSampling are optional and map
 * to null when absent; countries and caseType fall back to the class defaults.
 *
 * @return the parameters assembled from configuration
 */
public static ForecastErrorsAnalysisParameters load() {
    ModuleConfig moduleConfig = PlatformConfig.defaultConfig().getModuleConfig("fea-parameters");
    DateTime baseCaseDate = DateTime.parse(moduleConfig.getStringProperty("baseCaseDate"));
    Interval histoInterval = Interval.parse(moduleConfig.getStringProperty("histoInterval"));
    String feAnalysisId = moduleConfig.getStringProperty("feAnalysisId");
    double ir = moduleConfig.getDoubleProperty("ir");
    Integer flagPQ = moduleConfig.getIntProperty("flagPQ");
    Integer method = moduleConfig.getIntProperty("method");
    Integer nClusters = moduleConfig.getIntProperty("nClusters");
    double percentileHistorical = moduleConfig.getDoubleProperty("percentileHistorical");
    // optional tuning knobs: absent means "use engine default" (null)
    Integer modalityGaussian = moduleConfig.getOptionalIntegerProperty("modalityGaussian").orElse(null);
    Integer outliers = moduleConfig.getOptionalIntegerProperty("outliers").orElse(null);
    Integer conditionalSampling = moduleConfig.getOptionalIntegerProperty("conditionalSampling").orElse(null);
    Integer nSamples = moduleConfig.getIntProperty("nSamples");
    Set<Country> countries = moduleConfig.getEnumSetProperty("countries", Country.class, DEFAULT_COUNTRIES);
    CaseType caseType = moduleConfig.getEnumProperty("caseType", CaseType.class, DEFAULT_CASE_TYPE);
    return new ForecastErrorsAnalysisParameters(baseCaseDate, histoInterval, feAnalysisId, ir, flagPQ,
            method, nClusters, percentileHistorical, modalityGaussian, outliers, conditionalSampling,
            nSamples, countries, caseType);
}
示例4: testWrite
import org.joda.time.Interval; //導入方法依賴的package包/類
// Verifies that UncertaintiesAmplWriter dumps the result of an uncertainties
// analysis into three AMPL text files (reduction matrix, trust intervals,
// means) with the exact expected content.
@Test
public void testWrite() throws Exception {
// fixture network; the expected files below reference one load ("L 1") and one generator ("G 1")
Network network = NetworkTest1Factory.create();
// one-month historical interval fed to the analyser
Interval histoInterval = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");
UncertaintiesAnalyser uncertaintiesAnalyser = new UncertaintiesAnalyserTestImpl(network);
// analyse() returns a future; join() blocks for the computed uncertainties
Uncertainties uncertainties = uncertaintiesAnalyser.analyse(histoInterval).join();
// in-memory data source captures the writer's output for inspection
MemDataSource dataSource = new MemDataSource();
// maps network ids to the integer ids used in the AMPL files
StringToIntMapper<AmplSubset> mapper = new StringToIntMapper<>(AmplSubset.class);
AmplUtil.fillMapper(mapper, network);
new UncertaintiesAmplWriter(uncertainties, dataSource, mapper).write();
// expected content of the reduction-matrix file
String fileContent = String.join(System.lineSeparator(),
"#Reduction matrix",
"#\"inj. type\" \"inj. num\" \"var. num\" \"coeff.\"",
"L 1 1 1.00000",
"G 1 2 1.00000");
assertEquals(fileContent, new String(dataSource.getData(WCAConstants.REDUCTION_MATRIX_FILE_SUFFIX, WCAConstants.TXT_EXT), StandardCharsets.UTF_8).trim());
// expected content of the trust-intervals file (min/max per reduced variable)
fileContent = String.join(System.lineSeparator(),
"#Trust intervals",
"#\"var. num\" \"min\" \"max\"",
"1 -1.00000 1.00000",
"2 -90.0000 90.0000");
assertEquals(fileContent, new String(dataSource.getData(WCAConstants.TRUST_INTERVAL_FILE_SUFFIX, WCAConstants.TXT_EXT), StandardCharsets.UTF_8).trim());
// expected content of the means file (mean injection per load/generator)
fileContent = String.join(System.lineSeparator(),
"#Means",
"#\"inj. type\" \"inj. num\" \"mean\"",
"L 1 10.0000",
"G 1 900.000");
assertEquals(fileContent, new String(dataSource.getData(WCAConstants.MEANS_FILE_SUFFIX, WCAConstants.TXT_EXT), StandardCharsets.UTF_8).trim());
}
示例5: run
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Command-line entry point: rebuilds the unique network topology for a case
 * over a historical interval, reading the topology-mining results from the
 * topology cache. An optional dictionary maps ids to short display names.
 */
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path casePath = Paths.get(line.getOptionValue("case-file"));
    Interval histoInterval = Interval.parse(line.getOptionValue("interval"));
    Path dictionaryFile = line.hasOption("use-short-ids-dict")
            ? Paths.get(line.getOptionValue("use-short-ids-dict"))
            : null;
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));
    Network network = Importers.loadNetwork(casePath);
    if (network == null) {
        throw new RuntimeException("Case '" + casePath + "' not found");
    }
    // the topology build accesses network states from several threads
    network.getStateManager().allowStateMultiThreadAccess(true);
    OfflineConfig offlineConfig = OfflineConfig.load();
    try (TopologyMiner miner = offlineConfig.getTopologyMinerFactoryClass().newInstance().create()) {
        Path cacheDir = TopologyContext.createTopoCacheDir(network, histoInterval, correlationThreshold, probabilityThreshold);
        TopologyContext topoContext = miner.loadContext(cacheDir, histoInterval, correlationThreshold, probabilityThreshold);
        if (topoContext == null) {
            throw new RuntimeException("Topology context not found");
        }
        ShortIdDictionary dictionary = dictionaryFile == null ? null : new ShortIdDictionary(dictionaryFile);
        new UniqueTopologyBuilder(topoContext.getTopologyHistory(), dictionary).build(network);
    }
}
示例6: run
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Command-line entry point: prints the unique topology computed for one
 * substation over a historical interval, using the topology-mining cache.
 * An optional dictionary maps ids to short display names.
 *
 * <p>Fix: guard against a missing topology context before dereferencing it —
 * loadContext can return null (the sibling rebuild command performs the same
 * check); previously a missing context caused a NullPointerException on
 * getTopologyHistory().
 */
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    String substationId = line.getOptionValue("substation-id");
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("use-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("use-short-ids-dict"));
    }
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));
    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    // the topology build accesses network states from several threads
    network.getStateManager().allowStateMultiThreadAccess(true);
    OfflineConfig config = OfflineConfig.load();
    try (TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) {
        Path topoCacheDir = TopologyContext.createTopoCacheDir(network, interval, correlationThreshold, probabilityThreshold);
        TopologyContext topologyContext = topologyMiner.loadContext(topoCacheDir, interval, correlationThreshold, probabilityThreshold);
        // BUG FIX: fail with a clear message instead of an NPE when the cache is empty
        if (topologyContext == null) {
            throw new RuntimeException("Topology context not found");
        }
        Map<String, UniqueTopology> uniqueTopologies = new UniqueTopologyBuilder(topologyContext.getTopologyHistory()).build();
        UniqueTopology uniqueTopology = uniqueTopologies.get(substationId);
        if (uniqueTopology == null) {
            throw new RuntimeException("Unique topology not found for substation " + substationId);
        }
        ShortIdDictionary dict = null;
        if (dictFile != null) {
            dict = new ShortIdDictionary(dictFile);
        }
        uniqueTopology.print(context.getOutputStream(), dict);
    }
}
示例7: run
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Command-line entry point: streams historical data (or per-attribute
 * statistics with the "statistics" option) from the histo DB as CSV to the
 * tool's output, optionally pretty-printed via format().
 */
@Override
public void run(CommandLine line, ToolRunningContext context) throws Exception {
    OfflineConfig offlineConfig = OfflineConfig.load();
    try (HistoDbClient client = offlineConfig.getHistoDbClientFactoryClass().newInstance().create()) {
        boolean statsQuery = line.hasOption("statistics");
        Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>();
        // the datetime meta-column only makes sense for data queries
        if (!statsQuery && line.hasOption("add-datetime")) {
            attributeIds.add(HistoDbMetaAttributeId.datetime);
        }
        for (String token : line.getOptionValue("attributes").split(",")) {
            attributeIds.add(HistoDbAttributeIdParser.parse(token));
        }
        Interval queryInterval = Interval.parse(line.getOptionValue("interval"));
        boolean prettyPrint = line.hasOption("format");
        HistoDbHorizon selectedHorizon = line.hasOption("horizon")
                ? HistoDbHorizon.valueOf(line.getOptionValue("horizon"))
                : HistoDbHorizon.SN;
        // synchronous, uncompressed transfer
        boolean async = false;
        boolean zipped = false;
        InputStream is = client.queryCsv(statsQuery ? HistoQueryType.stats : HistoQueryType.data,
                attributeIds, queryInterval, selectedHorizon, zipped, async);
        if (prettyPrint) {
            format(is, zipped, context.getOutputStream());
        } else {
            try (Reader reader = createReader(is, zipped)) {
                CharStreams.copy(reader, context.getOutputStream());
            }
        }
    }
}
示例8: convert
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Converts an ISO-8601 interval string into a Joda-Time {@link Interval}
 * whose endpoints are normalized to UTC.
 *
 * @param value the interval string in "start/end" form; must not be null
 * @return the parsed interval with start and end rendered in UTC
 * @throws NullPointerException if value is null
 */
@Override
public Interval convert(String value) {
    // Interval.parse(null) creates an interval with both start and end times set to now.
    // Do something a little more reasonable — and say why (the original threw a bare,
    // message-less NPE despite this comment explaining the rationale).
    if (value == null) {
        throw new NullPointerException("interval string must not be null");
    }
    Interval interval = Interval.parse(value);
    // Interval does not have a way to set the time zone, so create a new interval with the
    // start and end times of the parsed interval converted to UTC.
    return new Interval(
            interval.getStart().withZone(DateTimeZone.UTC),
            interval.getEnd().withZone(DateTimeZone.UTC));
}
示例9: testmethods
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Smoke-tests the stub HistoDbClient built by HistoDbClientTestFactoryImpl:
 * every listing and query over the interval yields an empty result.
 *
 * <p>Fix: JUnit's assertEquals takes (expected, actual) — the original had the
 * arguments swapped, which produces misleading failure messages.
 */
@Test
public void testmethods() throws IOException, InterruptedException {
    Interval interval = Interval.parse("2013-01-14T00:00:00+01:00/2013-01-14T01:00:00+01:00");
    HistoDbClient histoDbClient = new HistoDbClientTestFactoryImpl().create(false);
    assertEquals("", histoDbClient.getDbName());
    assertTrue(histoDbClient.listDbs().size() == 0);
    assertTrue(histoDbClient.queryCount(interval, HistoDbHorizon.SN) == 0);
    assertEquals(Collections.emptyList(), histoDbClient.listAttributes());
    // both CSV query overloads return an empty stream
    assertEquals("", IOUtils.toString(histoDbClient.queryCsv(HistoQueryType.data, Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), interval, HistoDbHorizon.SN, true, true), StandardCharsets.UTF_8));
    assertEquals("", IOUtils.toString(histoDbClient.queryCsv(HistoQueryType.data, Collections.emptySet(), interval, HistoDbHorizon.SN, true, true), StandardCharsets.UTF_8));
    // stats queries still produce a (non-null) result object
    assertNotNull(histoDbClient.queryStats(Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), interval, HistoDbHorizon.SN, true));
    assertNotNull(histoDbClient.queryStats(Collections.emptySet(), interval, HistoDbHorizon.SN, true));
}
示例10: unmarshal
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * JAXB unmarshalling hook: turns the serialized string form back into a
 * Joda-Time {@link Interval}.
 *
 * @param v the serialized interval (ISO-8601 "start/end" form)
 * @return the parsed interval
 */
@Override
public Interval unmarshal(String v) throws Exception {
    final Interval parsed = Interval.parse(v);
    return parsed;
}
示例11: getIntervalProperty
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Reads the named configuration property and parses it as a Joda-Time
 * {@link Interval}.
 *
 * @param name the property name to look up
 * @return the parsed interval
 */
@Override
public Interval getIntervalProperty(String name) {
    final String raw = getStringProperty(name);
    return Interval.parse(raw);
}
示例12: IntervalConstant
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Builds a constant wrapping the Joda-Time {@link Interval} parsed from its
 * textual ISO-8601 "start/end" form; parse failures propagate to the caller.
 *
 * @param value the textual interval to parse
 */
public IntervalConstant(String value) {
super(Interval.parse(value));
}
示例13: parse
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Static factory: parses an ISO-8601 "start/end" interval string into a
 * TimeInterval wrapper.
 *
 * @param value the textual interval to parse
 * @return a TimeInterval wrapping the parsed Joda-Time interval
 */
public static TimeInterval parse(String value) {
    final Interval parsed = Interval.parse(value);
    return new TimeInterval(parsed);
}
示例14: loadDefault
import org.joda.time.Interval; //導入方法依賴的package包/類
/**
 * Loads the default online-workflow parameters from the
 * "online-default-parameters" platform configuration module.
 *
 * <p>Two mutually exclusive ways of selecting the base case are supported:
 * an explicit "caseFile", or a (baseCaseDate, caseType, countries) triple —
 * configuring both is rejected.
 *
 * <p>Fix: read rulesPurityThreshold via getDoubleProperty, consistent with how
 * every other numeric property in this file is read (e.g. "ir" in the FEA
 * parameters), instead of manually parsing the raw string.
 *
 * @return the workflow parameters assembled from configuration
 */
public static OnlineWorkflowParameters loadDefault() {
    ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters");
    int states = config.getIntProperty("states");
    String offlineWorkflowId = config.getStringProperty("offlineWorkflowId", null);
    TimeHorizon timeHorizon = TimeHorizon.fromName(config.getStringProperty("timeHorizon").trim());
    Interval histoInterval = Interval.parse(config.getStringProperty("histoInterval"));
    String feAnalysisId = config.getStringProperty("feAnalysisId");
    // CONSISTENCY FIX: was Double.parseDouble(config.getStringProperty(...))
    double rulesPurityThreshold = config.getDoubleProperty("rulesPurityThreshold");
    boolean storeStates = config.getBooleanProperty("storeStates", false);
    boolean analyseBasecase = config.getBooleanProperty("analyseBasecase", true);
    boolean validation = config.getBooleanProperty("validation", false);
    Set<SecurityIndexType> securityIndexes = config.getEnumSetProperty("securityIndexes", SecurityIndexType.class, null);
    boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DEFAULT_MERGE_OPTIMIZED);
    float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION);
    boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DEFAULT_HANDLE_VIOLATIONS_IN_N);
    float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN);
    String caseFile = config.getStringProperty("caseFile", null);
    if (caseFile != null) {
        // a case file excludes the date/type/countries way of picking the base case
        if ((config.getStringProperty("baseCaseDate", null) != null)
                || (config.getStringProperty("caseType", null) != null)
                || (config.getStringProperty("countries", null) != null)) {
            throw new RuntimeException("caseFile and ( baseCaseDate, caseType, countries ) are mutually exclusive options");
        }
        return new OnlineWorkflowParameters(states,
                histoInterval,
                offlineWorkflowId,
                timeHorizon,
                feAnalysisId,
                rulesPurityThreshold,
                storeStates,
                analyseBasecase,
                validation,
                securityIndexes,
                mergeOptimized,
                limitReduction,
                handleViolationsInN,
                constraintMargin,
                caseFile);
    }
    // no case file: base case selected by date, type and countries (all mandatory here)
    DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate"));
    CaseType caseType = config.getEnumProperty("caseType", CaseType.class);
    Set<Country> countries = config.getEnumSetProperty("countries", Country.class);
    return new OnlineWorkflowParameters(baseCaseDate,
            states,
            histoInterval,
            offlineWorkflowId,
            timeHorizon,
            feAnalysisId,
            rulesPurityThreshold,
            storeStates,
            analyseBasecase,
            validation,
            securityIndexes,
            caseType,
            countries,
            mergeOptimized,
            limitReduction,
            handleViolationsInN,
            constraintMargin);
}
示例15: testHistoricalDataToCsvFile
import org.joda.time.Interval; //導入方法依賴的package包/類
// Verifies that FEAHistoDBFacade.historicalDataToCsvFile writes the histo DB's
// forecast-diff CSV response for the given generators/loads to the target file
// unchanged (byte-for-byte).
@Test
public void testHistoricalDataToCsvFile() throws Exception {
List<String> generatorsIds = Arrays.asList("generator1", "generator2");
List<String> loadsIds = Arrays.asList("load1", "load2", "load3");
Interval histoInterval = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");
// fixture CSV: header row with <id>_P/<id>_Q columns per equipment, then one
// DACF (forecast) row and one SN (snapshot) row
String csvContent = String.join(System.lineSeparator(),
String.join(",",
"datetime",
"horizon",
"forecastTime",
generatorsIds.stream().map(generatorId -> String.join(",", generatorId + "_P", generatorId + "_Q")).collect(Collectors.joining(",")),
loadsIds.stream().map(loadId -> String.join(",", loadId + "_P", loadId + "_Q")).collect(Collectors.joining(","))),
String.join(",",
"Fri 01 Jan 2013 00:00:00 GMT","720","DACF",
"0.1","-0.1","0.2","-0.2",
"0.1","-0.1","0.2","-0.2","0.3","-0.3"),
String.join(",",
"Fri 01 Jan 2013 00:00:00 GMT","0","SN",
"0.11","-0.11","0.21","-0.21",
"0.11","-0.11","0.21","-0.21","0.31","-0.31"));
// stub the histo DB client so the facade reads the fixture CSV; the facade is
// expected to issue a forecastDiff query over the interval at the DACF horizon,
// unzipped and synchronous (false, false)
HistoDbClient histoDbClient = Mockito.mock(HistoDbClient.class);
Mockito.when(histoDbClient.queryCsv(Matchers.eq(HistoQueryType.forecastDiff),
Matchers.any(),
Matchers.eq(histoInterval),
Matchers.eq(HistoDbHorizon.DACF),
Matchers.eq(false),
Matchers.eq(false)))
.thenReturn(new ByteArrayInputStream(csvContent.getBytes()));
String feaCsvFileName = "forecasterrors_historicaldata.csv";
// "fileSystem" is a test fixture field (presumably an in-memory NIO file system) — see the enclosing class
Path workingDir = Files.createDirectory(fileSystem.getPath("/working-dir"));
Path historicalDataCsvFile = workingDir.resolve(feaCsvFileName);
FEAHistoDBFacade.historicalDataToCsvFile(histoDbClient,
generatorsIds,
loadsIds,
histoInterval,
historicalDataCsvFile);
// the file must exist and match the stubbed CSV exactly
assertTrue(Files.exists(historicalDataCsvFile));
try (InputStream expectedStream = new ByteArrayInputStream(csvContent.getBytes());
InputStream actualStream = Files.newInputStream(historicalDataCsvFile)) {
assertTrue(IOUtils.contentEquals(expectedStream, actualStream));
}
}