This article collects typical usage examples of the C++ THash::Save method. If you are looking for answers to questions such as how to use THash::Save in C++, how to call it, or what real-world usage looks like, the curated code samples below may help. You can also explore further usage examples of the containing class, THash.
A total of 3 code examples of the THash::Save method are shown below, sorted by popularity by default.
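Before the full examples, here is a minimal, self-contained sketch of the usual Save/Load round trip, assuming the SNAP library's "Snap.h" header and its TFOut/TFIn stream classes; the file name hash_demo.bin and the key/value contents are illustrative assumptions, not taken from the examples below.

#include "Snap.h"

int main() {
  // Build a small hash table and serialize it to a binary file.
  THash<TStr, TInt> Counts;
  Counts.AddDat("apple", 3);
  Counts.AddDat("banana", 5);
  {
    TFOut FOut("hash_demo.bin"); // binary output stream; flushed when it goes out of scope
    Counts.Save(FOut);           // THash::Save writes the keys and values to the stream
  }
  // Restore the table from disk (possibly in another program run).
  THash<TStr, TInt> Loaded;
  TFIn FIn("hash_demo.bin");
  Loaded.Load(FIn);
  printf("Loaded %d entries\n", Loaded.Len());
  return 0;
}

The examples below follow the same pattern, but write through TZipOut so the serialized hash is compressed on disk.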
Example 1: SaveAll
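This example serializes a global hash of Twitter URL cascades to a compressed output stream and prints its size.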
void SaveAll()
{
  printf("\n<<< SAVING STARTS (PLEASE BE PATIENT!!!) >>> .......\n");
  TZipOut resOut("CascadesFullUrlsOnTwitterData.rar");
  cascadesInUrlsOnTwitter.Save(resOut);
  printf("The size of CascadesFullUrlsOnTwitterData was: %d\n", cascadesInUrlsOnTwitter.Len());
  printf("\n<<<<<<<< SAVING DONE >>>>>>>>\n\n");
}
Example 2: main
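This (partially elided) example fetches tweets from a PostgreSQL database in large cursor batches, matches them against a set of known quote strings, accumulates per-quote cascades of tweet timestamps, and finally saves the resulting hash with THash::Save.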
//......... part of the code omitted here .........
      {
        if(fileExists(qContentFname))
        {
          // remove the quotes' content file
          system(TStr::Fmt("rm %s", qContentFname.CStr()).CStr());
        }
      }
      else
      {
        if(fileExists(qContentFname))
        {
          THash<TStr, TInt> quotesContent;
          THash<TInt, TSecTmV> CascadesOnTwitter;
          TZipIn ZquotesIn(qContentFname);
          quotesContent.Load(ZquotesIn);
          printf("Q%d loading done, it contains %d quotes.\n", indx-1, quotesContent.Len());
          conn = PQconnectdb("dbname=twitter host=postgresql01.mpi-sws.org user=twitter [email protected]");
          if (PQstatus(conn) == CONNECTION_BAD)
          {
            printf("We were unable to connect to the database.\n");
            return 1;
          }
          // use a cursor with FETCH to speed up the process, reading tweets in large batches
          PQexec(conn, "begin work");
          PQexec(conn, TStr::Fmt("declare mycursor cursor for select tweettext, extract(epoch from tweettime) from tweets where tweettime >= timestamp '%s' and tweettime < timestamp '%s'", StartDate.CStr(), EndDate.CStr()).CStr());
          do
          {
            res = PQexec(conn, "FETCH 1000000 IN mycursor"); // total number of tweets: 1675401026
            if (PQresultStatus(res) == PGRES_TUPLES_OK)
            {
              rec_count = PQntuples(res);
              total_number_tweets += rec_count;
              printf("Adding %d tweets... (total: %d)\n", rec_count, total_number_tweets);
              for (row = 0; row < rec_count; row++)
              {
                TweetStr = PQgetvalue(res, row, 0);
                tweet_date = TStr(PQgetvalue(res, row, 1)).GetFlt();
                TweetStrLc = TweetStr.ToLc();
                for (q = 0; q < quotesContent.Len(); q++)
                {
                  if (TweetStrLc.SearchStr(quotesContent.GetKey(q)) > -1)
                  {
                    TSecTm td(tweet_date);
                    id = CascadesOnTwitter.GetKeyId(quotesContent[q]);
                    if (id == -1)
                    {
                      CascadesOnTwitter.AddDat(quotesContent[q]).Add(td);
                    }
                    else
                    {
                      CascadesOnTwitter.GetDat(quotesContent[q]).AddSorted(td);
                    }
                  }
                }
              }
              PQclear(res);
            }
            else
            {
              rec_count = 0;
            }
          }
          while (rec_count);
          PQexec(conn, "close mycursor");
          PQexec(conn, "commit work");
          PQfinish(conn);
          // save the results
          TZipOut zout(resultFname);
          CascadesOnTwitter.Save(zout);
          // remove the quotes' content file
          system(TStr::Fmt("rm %s", qContentFname.CStr()).CStr());
        }
      }
    }
    printf("\n\nD O N E\n\n");
  }
  catch(exception& ex)
  {
    printf("\nError1 happened, it was: %s\n\n", ex.what());
  }
  catch(TPt<TExcept>& ex)
  {
    printf("\nError2 happened: %s\n\n", ex[0].GetStr().CStr());
  }
  printf("\nrun time: %s (%s)\n", ExeTm.GetTmStr(), TSecTm::GetCurTm().GetTmStr().CStr());
  return 0;
}
Example 3: main
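This example loads preprocessed quote cascades, drops quotes whose volume histogram has more than five peaks (following the NIFTY peak definition), and saves the filtered hash with THash::Save.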
int main(int argc, char* argv[])
{
  TExeTm ExeTm;
  THash<TStr, CascadeElementV> quotesFiltered;
  double* vol_me;
  uint period = 9 * 24 * 3600; // 9-day bins, following the NIFTY paper
  printf("((((( Starting The Filtering Cascades CODE )))))\n");
  try
  {
    Env = TEnv(argc, argv, TNotify::StdNotify);
    Env.PrepArgs(TStr::Fmt("\nFiltering Memes Cascades. build: %s, %s. Time: %s", __TIME__, __DATE__, TExeTm::GetCurTm()));
    // ---== Loading Data ==---
    TZipIn ZquotesIn("QuotesPreprocessedData_NIFTY.rar"); // alternative path: "/agbs/cluster/oaskaris/Data_Preparing_Codes/RESULTS/QuotesPreprocessedData_NIFTY.rar"
    THash<TStr, CascadeElementV> quotes;
    quotes.Load(ZquotesIn);
    printf("Loaded QuotesPreprocessedData_NIFTY; it contains %d instances.\n\n\n", quotes.Len());
    // NIFTY method for filtering by peaks
    uint begin = TSecTm(2008, 7, 31, 0, 0, 0).GetAbsSecs();
    uint end = TSecTm(2009, 10, 1, 0, 0, 0).GetAbsSecs();
    TSecTmV memesTimes;
    int bins = (end - begin) / period;
    for (int c = 0; c < quotes.Len(); c++)
    {
      memesTimes.Clr();
      for (int i = 0; i < quotes[c].Len(); i++)
      {
        memesTimes.Add(quotes[c][i].time);
      }
      vol_me = Tools::calculateHistOfCascade(memesTimes, begin, period, false);
      // calculate mean and standard deviation of the per-bin volumes
      double mean = 0;
      for (int i = 0; i < bins; i++)
      {
        mean += vol_me[i];
      }
      mean /= bins;
      double std = 0;
      for (int i = 0; i < bins; i++)
      {
        std += pow(vol_me[i] - mean, 2);
      }
      std = sqrt(std / (bins - 1));
      // peak definition from NIFTY: a bin is a peak if its volume (with 9-day binning) is more than 1 standard deviation above the average frequency
      double maxVolume = mean + std;
      int peakCnt = 0;
      for (int i = 0; i < bins; i++)
      {
        if (vol_me[i] > maxVolume)
        {
          peakCnt++;
        }
      }
      // if there are more than 5 peaks, ignore this quote, since it is not a meme
      if (peakCnt > 5)
      {
        delete[] vol_me;
        continue;
      }
      quotesFiltered.AddDat(quotes.GetKey(c), quotes[c]);
      delete[] vol_me;
    }
    TZipOut mout("QuotesPreprocessedData_NIFTY_FINALFILTERED.rar");
    quotesFiltered.Save(mout);
    printf("Saved QuotesPreprocessedData_NIFTY_FINALFILTERED; it contains %d instances.\n\n\n", quotesFiltered.Len());
    printf("\nThe meme filtering for plotting completed successfully.\n");
  }
  catch(exception& ex)
  {
    printf("\nError1 happened, it was: %s\n\n", ex.what());
  }
  catch(TPt<TExcept>& ex)
  {
    printf("\nError2 happened: %s\n\n", ex[0].GetStr().CStr());
  }
  printf("\nrun time: %s (%s)\n", ExeTm.GetTmStr(), TSecTm::GetCurTm().GetTmStr().CStr());
  return 0;
}