This page collects typical usage examples of the C++ method ISamples::getStreamSize. If you are wondering how ISamples::getStreamSize works in practice or what calling it looks like, the curated examples below may help. You can also look further into the usage of the ISamples class, which declares this method.
The following shows 6 code examples of ISamples::getStreamSize, ordered by popularity by default.
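The six examples below share the same guard-and-train pattern: check that the sample list is non-empty, check that the number of streams reported by getStreamSize matches the number of models, and then train each model on its own stream (routing through ISMissingData when samples have gaps). The following minimal sketch condenses that pattern; the helper name train_per_stream is illustrative only, and the SSI umbrella header and namespace are assumptions, not shown by the examples themselves.

#include "ssi.h" // assumption: the SSI header that declares ISamples, IModel, ISMissingData
using namespace ssi; // assumption: SSI types live in the ssi namespace

// hypothetical helper, not part of the SSI framework
bool train_per_stream (ssi_size_t n_models, IModel **models, ISamples &samples) {
	if (samples.getSize () == 0) {
		ssi_wrn ("empty sample list");
		return false;
	}
	// one model per stream is the invariant every fusion example checks
	if (samples.getStreamSize () != n_models) {
		ssi_wrn ("#models (%u) differs from #streams (%u)", n_models, samples.getStreamSize ());
		return false;
	}
	ISMissingData samples_h (&samples);
	bool has_md = samples.hasMissingData ();
	for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
		if (models[n_model]->isTrained ()) {
			continue;
		}
		if (has_md) {
			// restrict the list to samples that actually carry data for this stream
			samples_h.setStream (n_model);
			models[n_model]->train (samples_h, n_model);
		} else {
			models[n_model]->train (samples, n_model);
		}
	}
	return true;
}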
Example 1: train
bool MyFusion::train (ssi_size_t n_models,
IModel **models,
ISamples &samples) {
if (samples.getSize () == 0) {
ssi_wrn ("empty sample list");
return false;
}
if (isTrained ()) {
ssi_wrn ("already trained");
return false;
}
ssi_size_t n_streams = samples.getStreamSize ();
if (n_streams != n_models) {
ssi_err ("#models (%u) differs from #streams (%u)", n_models, n_streams);
}
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
models[n_model]->train (samples, n_model);
}
}
_is_trained = true;
return true;
}
Example 2: train
bool MajorityVoting::train (ssi_size_t n_models,
IModel **models,
ISamples &samples) {
if (samples.getSize () == 0) {
ssi_wrn ("empty sample list");
return false;
}
if (samples.getStreamSize () != n_models) {
ssi_wrn ("#models (%u) differs from #streams (%u)", n_models, samples.getStreamSize ());
return false;
}
if (isTrained ()) {
ssi_wrn ("already trained");
return false;
}
_n_streams = samples.getStreamSize ();
_n_classes = samples.getClassSize ();
_n_models = n_models;
if (samples.hasMissingData ()) {
ISMissingData samples_h (&samples);
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
samples_h.setStream (n_model);
models[n_model]->train (samples_h, n_model);
}
}
}
else {
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) { models[n_model]->train (samples, n_model); }
}
}
return true;
}
Example 3: train
bool SimpleFusion::train (ssi_size_t n_models,
IModel **models,
ISamples &samples) {
if (samples.getSize () == 0) {
ssi_wrn ("empty sample list");
return false;
}
if (isTrained ()) {
ssi_wrn ("already trained");
return false;
}
ssi_size_t n_streams = samples.getStreamSize ();
if (n_streams != 1 && n_streams != n_models) {
ssi_err ("#models (%u) differs from #streams (%u)", n_models, n_streams);
}
if (samples.hasMissingData ()) {
ISMissingData samples_h (&samples);
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
samples_h.setStream(n_streams == 1 ? 0 : n_model);
models[n_model]->train (samples_h, n_model);
}
}
} else {
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
models[n_model]->train(samples, n_streams == 1 ? 0 : n_model);
}
}
}
_is_trained = true;
return true;
}
Example 4: open
bool FileSamplesOut::open (ISamples &data,
const ssi_char_t *path,
File::TYPE type,
File::VERSION version) {
ssi_msg (SSI_LOG_LEVEL_DETAIL, "open files '%s'", path);
_version = version;
if (_version < File::V2) {
ssi_wrn ("version < V2 not supported");
return false;
}
if (_file_info || _file_data) {
ssi_wrn ("samples already open");
return false;
}
_n_users = data.getUserSize ();
_users = new ssi_char_t *[_n_users];
_n_per_user = new ssi_size_t[_n_users];
for (ssi_size_t i = 0; i < _n_users; i++) {
_users[i] = ssi_strcpy (data.getUserName (i));
_n_per_user[i] = 0;
}
_n_classes = data.getClassSize ();
_classes = new ssi_char_t *[_n_classes];
_n_per_class = new ssi_size_t[_n_classes];
for (ssi_size_t i = 0; i < _n_classes; i++) {
_classes[i] = ssi_strcpy (data.getClassName (i));
_n_per_class[i] = 0;
}
_n_streams = data.getStreamSize ();
_streams = new ssi_stream_t[_n_streams];
for (ssi_size_t i = 0; i < _n_streams; i++) {
ssi_stream_t s = data.getStream (i);
ssi_stream_init (_streams[i], 0, s.dim, s.byte, s.type, s.sr, 0);
}
_has_missing_data = false;
if (path == 0 || path[0] == '\0') {
_console = true;
}
if (_console) {
_file_data = File::CreateAndOpen (type, File::WRITE, "");
if (!_file_data) {
ssi_wrn ("could not open console");
return false;
}
} else {
FilePath fp (path);
ssi_char_t *path_info = 0;
if (strcmp (fp.getExtension (), SSI_FILE_TYPE_SAMPLES) != 0) {
path_info = ssi_strcat (path, SSI_FILE_TYPE_SAMPLES);
} else {
path_info = ssi_strcpy (path);
}
_path = ssi_strcpy (path_info);
_file_info = File::CreateAndOpen (File::ASCII, File::WRITE, path_info);
if (!_file_info) {
ssi_wrn ("could not open info file '%s'", path_info);
return false;
}
ssi_sprint (_string, "<?xml version=\"1.0\" ?>\n<samples ssi-v=\"%d\">", version);
_file_info->writeLine (_string);
ssi_char_t *path_data = ssi_strcat (path_info, "~");
_file_data = File::CreateAndOpen (type, File::WRITE, path_data);
if (!_file_data) {
ssi_wrn ("could not open data file '%s'", path_data);
return false;
}
if (_version == File::V3) {
_file_streams = new FileStreamOut[_n_streams];
ssi_char_t string[SSI_MAX_CHAR];
for (ssi_size_t i = 0; i < _n_streams; i++) {
ssi_sprint (string, "%s.#%u", path_info, i);
_file_streams[i].open (_streams[i], string, type);
}
}
delete[] path_info;
delete[] path_data;
}
return true;
}
Example 5: train
bool FeatureFusion::train (ssi_size_t n_models,
IModel **models,
ISamples &samples) {
if (samples.getSize () == 0) {
ssi_wrn ("empty sample list");
return false;
}
if (isTrained ()) {
ssi_wrn ("already trained");
return false;
}
_n_streams = samples.getStreamSize ();
_n_classes = samples.getClassSize ();
_n_models = n_models;
//initialize weights
ssi_real_t **weights = new ssi_real_t*[n_models];
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
weights[n_model] = new ssi_real_t[_n_classes+1];
}
if (samples.hasMissingData ()) {
_handle_md = true;
ISMissingData samples_h (&samples);
Evaluation eval;
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
ssi_print("\nMissing data detected.\n");
}
//models[0] is featfuse_model, followed by singlechannel_models
ISMergeDim ffusionSamples (&samples);
ISMissingData ffusionSamples_h (&ffusionSamples);
ffusionSamples_h.setStream(0);
if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples_h, 0); }
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
eval.eval (*models[0], ffusionSamples_h, 0);
eval.print();
}
//dummy weights for fused model
for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
weights[0][n_class] = 0.0f;
}
weights[0][_n_classes] = 0.0f;
for (ssi_size_t n_model = 1; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
samples_h.setStream (n_model - 1);
models[n_model]->train (samples_h, n_model - 1);
}
eval.eval (*models[n_model], samples_h, n_model - 1);
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
eval.print();
}
for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
weights[n_model][n_class] = eval.get_class_prob (n_class);
}
weights[n_model][_n_classes] = eval.get_classwise_prob ();
}
//calculate fillers: for each stream, pick the remaining single-channel model with the highest class-wise probability
_filler = new ssi_size_t[_n_streams];
for (ssi_size_t n_fill = 0; n_fill < _n_streams; n_fill++) {
_filler[n_fill] = 1;
ssi_real_t filler_weight = weights[1][_n_classes];
for (ssi_size_t n_model = 2; n_model < n_models; n_model++) {
if (filler_weight < weights[n_model][_n_classes]) {
_filler[n_fill] = n_model;
filler_weight = weights[n_model][_n_classes];
}
}
weights[_filler[n_fill]][_n_classes] = 0.0f;
}
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
ssi_print("\nfiller:\n");
for (ssi_size_t n_model = 0; n_model < _n_streams; n_model++) {
ssi_print("%d ", _filler[n_model]);
}
ssi_print("\n");
}
}
else {
_handle_md = false;
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
ssi_print("\nNo missing data detected.\n");
}
ISMergeDim ffusionSamples (&samples);
if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples, 0); }
//......... the remainder of this example has been omitted .........
Example 6: train
bool WeightedMajorityVoting::train (ssi_size_t n_models,
IModel **models,
ISamples &samples) {
if (samples.getSize () == 0) {
ssi_wrn ("empty sample list");
return false;
}
if (samples.getStreamSize () != n_models) {
ssi_wrn ("#models (%u) differs from #streams (%u)", n_models, samples.getStreamSize ());
return false;
}
if (isTrained ()) {
ssi_wrn ("already trained");
return false;
}
_n_streams = samples.getStreamSize ();
_n_classes = samples.getClassSize ();
_n_models = n_models;
_weights = new ssi_real_t*[n_models];
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
_weights[n_model] = new ssi_real_t[_n_classes+1];
}
if (samples.hasMissingData ()) {
ISMissingData samples_h (&samples);
Evaluation eval;
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) {
samples_h.setStream (n_model);
models[n_model]->train (samples_h, n_model);
}
eval.eval (*models[n_model], samples_h, n_model);
for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
_weights[n_model][n_class] = eval.get_class_prob (n_class);
}
_weights[n_model][_n_classes] = eval.get_classwise_prob ();
}
}
else {
Evaluation eval;
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
if (!models[n_model]->isTrained ()) { models[n_model]->train (samples, n_model); }
eval.eval (*models[n_model], samples, n_model);
for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
_weights[n_model][n_class] = eval.get_class_prob (n_class);
}
_weights[n_model][_n_classes] = eval.get_classwise_prob ();
}
}
if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
ssi_print("\nClassifier Weights: \n");
for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
ssi_print ("%f ", _weights[n_model][n_class]);
}
ssi_print ("%f\n", _weights[n_model][_n_classes]);
}
}
return true;
}
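For context, a call site might look like the sketch below. How the fusion object and the per-stream models are created (typically via the SSI Factory) is outside the scope of these examples, and the helper name train_fusion is hypothetical.

// hypothetical caller: derive the model count from the sample list itself
bool train_fusion (MajorityVoting &fusion, IModel **models, ISamples &samples) {
	ssi_size_t n_models = samples.getStreamSize (); // one model per stream
	return fusion.train (n_models, models, samples);
}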