本文整理汇总了PHP中Logger::logUserInfo方法的典型用法代码示例。如果您正苦于以下问题:PHP Logger::logUserInfo方法的具体用法?PHP Logger::logUserInfo怎么用?PHP Logger::logUserInfo使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Logger
的用法示例。
在下文中一共展示了Logger::logUserInfo方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的PHP代码示例。
示例1: fetchFriends
/**
 * Fetch and save the instance user's friends.
 *
 * Marks the friend archive as not loaded when fewer friends are stored in the
 * data store than Instagram reports for the user, then either pages through
 * the friend list (archive not loaded) or refreshes stale follow records
 * (archive loaded).
 */
public function fetchFriends()
{
    if (!isset($this->user)) {
        //Force-refresh instance user in data store
        $this->user = self::fetchUser($this->instance->network_user_id, 'Owner info',
            $this->instance->network_username, null, null, true);
    }
    $follow_dao = DAOFactory::getDAO('FollowDAO');
    $this->instance->total_friends_in_system = $follow_dao->countTotalFriends($this->instance->network_user_id,
        'instagram');
    $this->logger->logUserInfo($this->instance->total_friends_in_system . " friends in system, "
        . $this->user->friend_count . " friends according to Instagram", __METHOD__ . ',' . __LINE__);
    // The archive counts as loaded unless we hold fewer friends than Instagram reports.
    // (The previous elseif/else branches both assigned true, so they collapse into one else.)
    if ($this->instance->total_friends_in_system < $this->user->friend_count) {
        $this->instance->is_archive_loaded_friends = false;
    } else {
        $this->instance->is_archive_loaded_friends = true;
    }
    //If archive is not loaded, page through friends, resuming from the stored cursor
    if (!$this->instance->is_archive_loaded_friends) {
        $this->logger->logInfo("Friend archive is not loaded, start paging", __METHOD__ . ',' . __LINE__);
        $this->pageThroughFriends($this->instance->follows_next_cursor);
    }
    //If archive is loaded, update stale follow relationships instead
    if ($this->instance->is_archive_loaded_friends) {
        $this->logger->logInfo("Friend archive loaded, start updating stale friendships",
            __METHOD__ . ',' . __LINE__);
        $this->updateStaleFollows(true);
    }
}
示例2: fetchPagePostsAndReplies
/**
 * Fetch and save the posts and replies on a Facebook page.
 * @param int $pid Page ID
 */
public function fetchPagePostsAndReplies($pid) {
    $stream = FacebookGraphAPIAccessor::apiRequest('/'.$pid.'/posts', $this->access_token);
    // Bug fix: the original tested sizeof($stream->data > 0), i.e. sizeof() of a
    // boolean, which is always 1 in PHP; the parenthesis belongs after ->data.
    if (isset($stream->data) && is_array($stream->data) && sizeof($stream->data) > 0) {
        $this->logger->logSuccess(sizeof($stream->data)." Facebook posts found for page ID $pid.",
        __METHOD__.','.__LINE__);
        $thinkup_data = $this->parseStream($stream, 'facebook page');
        $posts = $thinkup_data["posts"];
        $post_dao = DAOFactory::getDAO('PostDAO');
        $added_posts = 0;
        foreach ($posts as $post) {
            // Backfill author details for comments that arrived without a username
            if ($post['author_username']== "" && isset($post['author_user_id'])) {
                $commenter_object = $this->fetchUserInfo($post['author_user_id'], 'facebook',
                'Facebook page comments');
                if (isset($commenter_object)) {
                    $post["author_username"] = $commenter_object->full_name;
                    $post["author_fullname"] = $commenter_object->full_name;
                    $post["author_avatar"] = $commenter_object->avatar;
                }
            }
            $added_posts = $added_posts + $post_dao->addPost($post);
            $this->logger->logInfo("Added post ID ".$post["post_id"]." on ".$post["network"].
            " for ".$post["author_username"].":".$post["post_text"], __METHOD__.','.__LINE__);
        }
        $added_users = 0;
        $users = $thinkup_data["users"];
        if (count($users) > 0) {
            foreach ($users as $user) {
                $user["post_count"] = $post_dao->getTotalPostsByUser($user['user_id'], $user['network']);
                $found_in = 'Facebook page stream';
                $user_object = new User($user, $found_in);
                $user_dao = DAOFactory::getDAO('UserDAO');
                $user_dao->updateUser($user_object);
                $added_users = $added_users + 1;
            }
        }
        if ($added_posts > 0 || $added_users > 0) {
            $this->logger->logUserSuccess($added_posts." post(s) added; ".$added_users." user(s) updated.",
            __METHOD__.','.__LINE__);
        } else {
            $this->logger->logUserInfo("No new page posts found.", __METHOD__.','.__LINE__);
        }
    } else {
        $this->logger->logInfo("No Facebook posts found for page ID $pid", __METHOD__.','.__LINE__);
    }
}
示例3: fetchInstanceUserFavorites
/**
 * Fetch and store the instance user's favorites added since the last favorite stored.
 *
 * Pages through the favorites API starting from the highest favorite ID seen so
 * far, saving any new favorites, until the API returns an empty page or a
 * non-200 status.
 */
public function fetchInstanceUserFavorites()
{
    if (!isset($this->user)) {
        $this->fetchInstanceUserInfo();
    }
    $this->logger->logUserInfo("Checking for new favorites.", __METHOD__ . ',' . __LINE__);
    $newest_fav_id = $this->instance->last_favorite_id;
    $this->logger->logInfo("Owner favs: " . $this->user->favorites_count
        . ", instance owner favs in system: " . $this->instance->owner_favs_in_system,
        __METHOD__ . ',' . __LINE__);
    $keep_fetching = true;
    while ($keep_fetching) {
        list($tweets, $http_status, $payload) = $this->getFavorites($newest_fav_id);
        if ($http_status != 200) {
            // API error or rate limit: stop paging
            $keep_fetching = false;
            continue;
        }
        if (sizeof($tweets) == 0) {
            // An empty page means we have exhausted the available favorites
            $this->logger->logInfo("It appears that we have run out of favorites to process",
                __METHOD__ . ',' . __LINE__);
            $keep_fetching = false;
            continue;
        }
        $favorite_dao = DAOFactory::getDAO('FavoritePostDAO');
        $fav_count = 0;
        foreach ($tweets as $tweet) {
            $tweet['network'] = 'twitter';
            if ($favorite_dao->addFavorite($this->user->user_id, $tweet) > 0) {
                URLProcessor::processPostURLs($tweet['post_text'], $tweet['post_id'], 'twitter', $this->logger);
                $this->logger->logInfo("Found new fav: " . $tweet['post_id'], __METHOD__ . ',' . __LINE__);
                $fav_count++;
                $this->logger->logInfo("Fav count: {$fav_count}", __METHOD__ . ',' . __LINE__);
                $this->logger->logInfo("Added favorite: " . $tweet['post_id'], __METHOD__ . ',' . __LINE__);
            }
            // An already-stored fav is skipped silently: it may have been saved on an
            // earlier partially-processed page, or by a concurrent process (such as a
            // UserStream watcher) that also records favorites.
            // Track the highest favorite ID encountered so the next request pages onward.
            if ($tweet['post_id'] > $newest_fav_id) {
                $newest_fav_id = $tweet['post_id'];
            }
        }
    }
}
示例4: fetchPostsAndReplies
/**
 * Fetch and save the posts and replies for the crawler's instance. This function will loop back through the
 * user's or pages archive of posts.
 * @return void
 * @throws APIOAuthException
 */
public function fetchPostsAndReplies()
{
    $id = $this->instance->network_user_id;
    $network = $this->instance->network;
    // fetch user's friends
    $this->fetchAndStoreFriends();
    $fetch_next_page = true;
    $current_page_number = 1;
    $next_api_request = 'https://graph.facebook.com/' . $id . '/feed?access_token=' . $this->access_token;
    //Cap crawl time for very busy pages with thousands of likes/comments
    $fetch_stop_time = time() + $this->max_crawl_time;
    //Determine 'since', datetime of oldest post in datastore
    $post_dao = DAOFactory::getDAO('PostDAO');
    $since_post = $post_dao->getAllPosts($id, $network, 1, 1, true, 'pub_date', 'ASC');
    $since = isset($since_post[0]) ? $since_post[0]->pub_date : 0;
    // Oldest post minus one day, just to be safe; clamp at 0.
    // (strtotime on an empty/zero value returns false, which the subtraction turns
    // negative — the original expressed the clamp as a no-op ternary assignment.)
    $since = strtotime($since) - 60 * 60 * 24;
    $since = max(0, $since);
    while ($fetch_next_page) {
        $stream = FacebookGraphAPIAccessor::rawApiRequest($next_api_request, true);
        if (isset($stream->data) && is_array($stream->data) && sizeof($stream->data) > 0) {
            $this->logger->logInfo(sizeof($stream->data) . " Facebook posts found on page "
                . $current_page_number, __METHOD__ . ',' . __LINE__);
            $this->processStream($stream, $network, $current_page_number);
            if (isset($stream->paging->next)) {
                $next_api_request = $stream->paging->next . '&since=' . $since;
                $current_page_number++;
            } else {
                $fetch_next_page = false;
            }
        } elseif (isset($stream->error->type) && $stream->error->type == 'OAuthException') {
            throw new APIOAuthException($stream->error->message);
        } else {
            $this->logger->logInfo("No Facebook posts found for ID {$id}", __METHOD__ . ',' . __LINE__);
            $fetch_next_page = false;
        }
        if (time() > $fetch_stop_time) {
            $fetch_next_page = false;
            $this->logger->logUserInfo("Stopping this service user's crawl because it has exceeded max time of "
                . $this->max_crawl_time / 60 . " minute(s). ", __METHOD__ . ',' . __LINE__);
        }
    }
}
示例5: acquireBitlyClickStats
/**
 * Expand Bit.ly links and recheck click count on any links less than 2 days old.
 *
 * @param str $api_key bitly api key
 * @param str $bit_login bitly login name
 */
public function acquireBitlyClickStats($api_key, $bit_login)
{
    $this->logger->setUsername(null);
    $api_accessor = new BitlyAPIAccessor($api_key, $bit_login);
    $bitly_urls = array('http://bit.ly/', 'http://bitly.com/', 'http://j.mp/');
    foreach ($bitly_urls as $bitly_url) {
        if ($this->link_limit != 0) {
            //all short links first seen in the last 48 hours
            $bitly_links_to_update = $this->short_link_dao->getLinksToUpdate($bitly_url);
            if (count($bitly_links_to_update) > 0) {
                $this->logger->logUserInfo(count($bitly_links_to_update) . " {$bitly_url}"
                    . " links to acquire click stats for.", __METHOD__ . ',' . __LINE__);
            } else {
                // Bug fix: the original passed __METHOD__.',' and __LINE__ as two separate
                // arguments (a stray comma instead of concatenation), so the line number
                // never reached the log location string.
                $this->logger->logUserInfo("There are no " . $bitly_url . " links to fetch click stats for.",
                    __METHOD__ . ',' . __LINE__);
            }
            $total_links = 0;
            $total_errors = 0;
            $total_updated = 0;
            foreach ($bitly_links_to_update as $link) {
                $this->logger->logInfo("Getting bit.ly click stats for " . ($total_updated + 1) . " of "
                    . count($bitly_links_to_update) . " " . $bitly_url . " links (" . $link->short_url . ")",
                    __METHOD__ . ',' . __LINE__);
                $link_data = $api_accessor->getBitlyLinkData($link->short_url);
                if ($link_data["clicks"] != '') {
                    //save click total here
                    $this->short_link_dao->saveClickCount($link->short_url, $link_data["clicks"]);
                    // Save title to links table
                    if ($link_data["title"] != '') {
                        $this->link_dao->updateTitle($link->link_id, $link_data["title"]);
                    }
                    $total_links = $total_links + 1;
                    $total_updated = $total_updated + 1;
                } elseif ($link_data["error"] != '') {
                    $this->link_dao->saveExpansionError($link->short_url, $link_data["error"]);
                    $total_errors = $total_errors + 1;
                    $total_updated = $total_updated + 1;
                }
            }
            $this->logger->logUserSuccess($total_links . " " . $bitly_url . " link click stats acquired ("
                . $total_errors . " errors)", __METHOD__ . ',' . __LINE__);
        }
    }
}
示例6: fetchInstanceFavorites
/**
* This method, and the two supporting private methods 'maintFavsFetch' and 'archivingFavsFetch', provide the
* primary crawler functionality for adding the user's favorites to the database.
* For a given user, the process starts in 'archiving mode', by
* working forwards from the last (oldest) page of tweets to the newest. This archiving crawl
* is only done once. The crawler tries to do this all in one go, but if it exhausts the available API count,
* it will continue where it left off in the next run.
* Then, when page 1 is reached in archiving mode, the crawler goes into 'maintenance mode' and works
* backwards from then on. It first pages back until
* it has reached the last fav it previously processed. Then it searches back N more pages to catch any older
* tweets that were fav'd out of chronological order, where N is determined by favs_older_pages option.
* The bookkeeping for these two crawler stages is maintained in the in tu_instances entry for the user.
*
* Recently, the Twitter favorites API has developed some bugs that need to be worked around. The comments below
* provide more detail, but in a nutshell, these methods can not currently use information from Twitter to
* calculate loop termination (so a bit more work may be done than necessary), and do not currently remove un-fav'd
* tweets from the database. Hopefully these API issues will be fixed by Twitter in future.
*/
public function fetchInstanceFavorites()
{
// first, check that we have the resources to do work
if (!($this->api->available && $this->api->available_api_calls_for_crawler)) {
$this->logger->logInfo("terminating fetchInstanceFavorites-- no API calls available", __METHOD__ . ',' . __LINE__);
return true;
}
$status_message = "";
//@TODO Can we get this from API?
$page_size = 20;
// number of favs per page retrieved from the API call
$this->logger->logUserInfo("Checking for new favorites.", __METHOD__ . ',' . __LINE__);
$last_favorites_count = $this->instance->favorites_profile;
$this->logger->logInfo("last favs count: {$last_favorites_count}", __METHOD__ . ',' . __LINE__);
$last_page_fetched_favorites = $this->instance->last_page_fetched_favorites;
$last_fav_id = $this->instance->last_favorite_id;
$curr_favs_count = $this->user->favorites_count;
$this->logger->logInfo("curr favs count: {$curr_favs_count}", __METHOD__ . ',' . __LINE__);
$last_page_of_favs = round($this->api->archive_limit / $page_size);
// under normal circs the latter clause below should never hold, but due to a previously-existing
// bug that could set a negative last_page_fetched_favorites value in the db in some cases,
// it is necessary for recovery.
if ($last_page_fetched_favorites == "" || $last_page_fetched_favorites < 0) {
$last_page_fetched_favorites = 0;
}
$this->logger->logInfo("got last_page_fetched_favorites: {$last_page_fetched_favorites}", __METHOD__ . ',' . __LINE__);
if ($last_fav_id == "") {
$last_fav_id = 0;
}
// the owner favs count, from twitter, is currently unreliable and may be less than the actual number of
// favs, by a large margin. So, we still go ahead and calculate the number of 'missing' tweets based on
// this info, but currently do not use it for fetch loop termination.
$this->logger->logInfo("owner favs: " . $this->user->favorites_count . ", instance owner favs in system: " . $this->instance->owner_favs_in_system, __METHOD__ . ',' . __LINE__);
$favs_missing = $this->user->favorites_count - $this->instance->owner_favs_in_system;
$this->logger->logInfo("favs missing: {$favs_missing}", __METHOD__ . ',' . __LINE__);
// figure out if we're in 'archiving' or 'maintenance' mode, via # of last_page_fetched_favorites
$mode = 0;
// default is archving/first-fetch
if ($last_page_fetched_favorites == 1) {
$mode = 1;
// we are in maint. mode
$new_favs_to_add = $favs_missing;
$this->logger->logInfo("new favs to add/missing: {$new_favs_to_add}", __METHOD__ . ',' . __LINE__);
$mpage = 1;
$starting_fav_id = $last_fav_id;
} else {
// we are in archiving mode.
$new_favs_to_add = $curr_favs_count - $last_favorites_count;
// twitter profile information is not always consistent, so ensure that this value is not negative
if ($new_favs_to_add < 0) {
$new_favs_to_add == 0;
}
$this->logger->logInfo("new favs to add: {$new_favs_to_add}", __METHOD__ . ',' . __LINE__);
// figure out start page based on where we left off last time, and how many favs added since then
$extra_pages = ceil($new_favs_to_add / $page_size);
$this->logger->logInfo("extra pages: {$extra_pages}", __METHOD__ . ',' . __LINE__);
$finished_first_fetch = false;
if ($last_page_fetched_favorites == 0) {
// if at initial starting fetch (first time favs ever crawled)
if ($extra_pages == 0) {
$extra_pages = 1;
// always check at least one page on initial fetch
}
$last_page_fetched_favs_start = $extra_pages + 1;
} else {
$last_page_fetched_favs_start = $last_page_fetched_favorites + $extra_pages;
}
if ($last_page_fetched_favs_start > $last_page_of_favs) {
$last_page_fetched_favs_start = $last_page_of_favs + 1;
}
}
$status_message = "total last favs count: {$last_favorites_count}" . ", last page fetched: {$last_page_fetched_favorites}, last fav id: {$last_fav_id}";
$this->logger->logInfo($status_message, __METHOD__ . ',' . __LINE__);
$this->logger->logInfo("current favs count: {$curr_favs_count}" . ", new favs to add: {$new_favs_to_add}, last page of favs: {$last_page_of_favs}, mode: {$mode}", __METHOD__ . ',' . __LINE__);
$continue = true;
$fcount = 0;
$older_favs_smode = false;
$stop_page = 0;
$status_message = "in fetchInstanceFavorites: API available: " . $this->api->available . ", avail for crawler: " . $this->api->available_api_calls_for_crawler;
$this->logger->logInfo($status_message, __METHOD__ . ',' . __LINE__);
while ($this->api->available && $this->api->available_api_calls_for_crawler > 0 && $continue) {
try {
//.........这里部分代码省略.........
示例7: fetchPostsAndReplies
/**
 * Fetch and save the posts and replies for the crawler's instance. This function will loop back through the
 * user's or pages archive of posts.
 * @return void
 * @throws APIOAuthException
 */
public function fetchPostsAndReplies()
{
    $id = $this->instance->network_user_id;
    $network = $this->instance->network;
    $fetch_next_page = true;
    $current_page_number = 1;
    $next_api_request = $id . '/feed';
    $fields = self::$feed_fields;
    //Cap crawl time for very busy pages with thousands of likes/comments
    $fetch_stop_time = time() + $this->max_crawl_time;
    $api_request_params = null;
    $use_full_api_url = false;
    $dig_into_archives = false;
    while ($fetch_next_page) {
        if (!$use_full_api_url) {
            $stream = FacebookGraphAPIAccessor::apiRequest($next_api_request, $this->access_token,
                $api_request_params, $fields);
            $api_request_params = null;
        } else {
            //Use full paging URL
            $stream = FacebookGraphAPIAccessor::apiRequestFullURL($next_api_request, $this->access_token);
        }
        if (isset($stream->data) && is_array($stream->data) && sizeof($stream->data) > 0) {
            $this->logger->logInfo(sizeof($stream->data) . " Facebook posts found on page "
                . $current_page_number, __METHOD__ . ',' . __LINE__);
            $total_added_posts = $this->processStream($stream, $network, $current_page_number);
            if ($total_added_posts == 0) {
                //No new posts were found, try going back into the archives
                if (!$dig_into_archives) {
                    $dig_into_archives = true;
                    //Determine 'since', datetime of oldest post in datastore
                    $post_dao = DAOFactory::getDAO('PostDAO');
                    $since_post = $post_dao->getAllPosts($id, $network, 1, 1, true, 'pub_date', 'ASC');
                    // Bug fix: the original dereferenced $since_post[0]->pub_date in the log
                    // message even when no post exists; guard it once and reuse.
                    $oldest_pub_date = isset($since_post[0]) ? $since_post[0]->pub_date : null;
                    $since = ($oldest_pub_date !== null) ? strtotime($oldest_pub_date) : 0;
                    $this->logger->logInfo("No Facebook posts found for {$id} here, digging into archives since "
                        . $oldest_pub_date . " strtotime " . $since, __METHOD__ . ',' . __LINE__);
                    $api_request_params = array('since' => $since);
                    $use_full_api_url = false;
                    $next_api_request = $id . '/feed';
                } else {
                    if (isset($stream->paging->next)) {
                        $next_api_request = $stream->paging->next;
                        $use_full_api_url = true;
                        //DEBUG
                        $this->logger->logInfo("Dug into archives, next page API request is "
                            . $next_api_request, __METHOD__ . ',' . __LINE__);
                        $current_page_number++;
                    } else {
                        $fetch_next_page = false;
                    }
                }
            } else {
                if (isset($stream->paging->next)) {
                    $next_api_request = $stream->paging->next;
                    $use_full_api_url = true;
                    //DEBUG
                    $this->logger->logInfo("Next page API request is " . $next_api_request,
                        __METHOD__ . ',' . __LINE__);
                    $current_page_number++;
                } else {
                    $fetch_next_page = false;
                }
            }
        } elseif (isset($stream->error->type) && $stream->error->type == 'OAuthException') {
            throw new APIOAuthException($stream->error->message);
        } else {
            $this->logger->logInfo("No Facebook posts found for ID {$id}", __METHOD__ . ',' . __LINE__);
            $fetch_next_page = false;
        }
        if (time() > $fetch_stop_time) {
            $fetch_next_page = false;
            $this->logger->logUserInfo("Stopping this service user's crawl because it has exceeded max time of "
                . $this->max_crawl_time / 60 . " minute(s). ", __METHOD__ . ',' . __LINE__);
        }
    }
}
示例8: fetchPostsAndReplies
/**
 * Fetch and save the posts and replies for the crawler's instance. This function will loop back through the
 * user's or pages archive of posts.
 */
public function fetchPostsAndReplies()
{
    $plugin_dao = DAOFactory::getDAO('PluginDAO');
    $plugin_id = $plugin_dao->getPluginId('instagram');
    $namespace = OptionDAO::PLUGIN_OPTIONS . '-' . $plugin_id;
    $id = $this->instance->network_user_id;
    $option_dao = DAOFactory::getDAO('OptionDAO');
    $network = $this->instance->network;
    //Checks if last friends update is over 2 days ago and runs storeFriends if it is.
    $friends_last_updated = $option_dao->getOptionByName($namespace, 'last_crawled_friends');
    $friends_last_updated_check = microtime(true) - 172800;
    if ($friends_last_updated == NULL) {
        $this->storeFriends();
        $option_dao->insertOption($namespace, 'last_crawled_friends', microtime(true));
    } elseif ($friends_last_updated->option_value < $friends_last_updated_check) {
        $this->storeFriends();
        $option_dao->updateOptionByName($namespace, 'last_crawled_friends', microtime(true));
    }
    $fetch_next_page = true;
    $current_page_number = 1;
    $api_param = array();
    if ($this->instance->total_posts_in_system != 0) {
        // Bug fix: the original called date($last_crawl, strtotime("-1 week")), which treats
        // the crawl datetime as a date() FORMAT string. The intent — a timestamp one week
        // before the last crawl — is what strtotime computes directly with a relative modifier.
        $last_crawl = $this->instance->crawler_last_run;
        $unix_less_week = strtotime($last_crawl . ' -1 week');
        $api_param = array('min_timestamp' => $unix_less_week, 'count' => 20);
    } else {
        $api_param = array('count' => 20);
    }
    $this->logger->logUserInfo("About to request media", __METHOD__ . ',' . __LINE__);
    $posts = InstagramAPIAccessor::apiRequest('media', $id, $this->access_token, $api_param);
    $this->logger->logUserInfo("Media requested", __METHOD__ . ',' . __LINE__);
    //Cap crawl time for very busy pages with thousands of likes/comments
    $fetch_stop_time = time() + $this->max_crawl_time;
    //Determine 'since', datetime of oldest post in datastore
    $post_dao = DAOFactory::getDAO('PostDAO');
    $since_post = $post_dao->getAllPosts($id, $network, 1, 1, true, 'pub_date', 'ASC');
    $since = isset($since_post[0]) ? $since_post[0]->pub_date : 0;
    // last post minus one day, just to be safe; clamp at 0
    $since = strtotime($since) - 60 * 60 * 24;
    $since = max(0, $since);
    while ($fetch_next_page) {
        if ($posts->count() > 0) {
            // Bug fix: the original logged sizeof($stream->data), but $stream is never
            // defined in this method — log the count of the fetched media page instead.
            $this->logger->logInfo($posts->count() . " Instagram posts found on page "
                . $current_page_number, __METHOD__ . ',' . __LINE__);
            $this->processPosts($posts, $network, $current_page_number);
            if ($posts->getNext() != null) {
                $api_param['max_id'] = $posts->getNext();
                $posts = InstagramAPIAccessor::apiRequest('media', $id, $this->access_token, $api_param);
                $current_page_number++;
            } else {
                $fetch_next_page = false;
            }
        } else {
            $this->logger->logInfo("No Instagram posts found for ID {$id}", __METHOD__ . ',' . __LINE__);
            $fetch_next_page = false;
        }
        if (time() > $fetch_stop_time) {
            $fetch_next_page = false;
            $this->logger->logUserInfo("Stopping this service user's crawl because it has exceeded max time of "
                . $this->max_crawl_time / 60 . " minute(s). ", __METHOD__ . ',' . __LINE__);
        }
    }
}
示例9: fetchInstanceUserVideos
/**
* Collects and stores information about the users videos from the YouTube APIs
* Currently collects and stores:
* - Basic video information such as title, author, description and location the video was shot in (if available)
* - Replies to the video
* -- This uses the YouTube V2 API due to the V3 API currently not supporting replies
* - All time counts for likes, dislikes, views, average view duration, average view percentage, favorites added,
* favorites removed, shares, subscribers gained and subscribers lost
* -- The totals for these are stored in the videos table, a history of these totals is stored in the
* count_history table under a type of [metric]_all_time and date of todays date
* -- A record of these metrics for indivdual days is also saved in the count_history table under a type of
* [metric] and date of the day the metric represents usually two days ago due to a delay in the availability
* of data from the Analytics API
* @return null
*/
public function fetchInstanceUserVideos()
{
$video_dao = DAOFactory::getDAO('VideoDAO');
$user_dao = DAOFactory::getDAO('UserDAO');
$post_dao = DAOFactory::getDAO('PostDAO');
$count_history_dao = DAOFactory::getDAO('CountHistoryDAO');
$instance_dao = DAOFactory::getDAO('InstanceDAO');
// Get the users upload playlist ID
$fields_for_ids = array('part' => 'contentDetails,statistics', 'mine' => 'true');
$various_ids = $this->youtube_api_accessor->apiRequest('channels', $this->access_token, $fields_for_ids);
$upload_id = $various_ids->items[0]->contentDetails->relatedPlaylists->uploads;
// Also get their channel ID as we'll need it later on
$channel_id = $various_ids->items[0]->id;
// There are some required attributes about the author that YouTube doesn't return for the videos so we need
// to query the database for them
$author_details = $user_dao->getDetails($this->instance->network_user_id, 'youtube');
$user_id = $this->instance->network_user_id;
// Update the users subscriber count
$subscriber_count = $various_ids->items[0]->statistics->subscriberCount;
$author_details->follower_count = $subscriber_count;
$user_dao->updateUser($author_details);
$count_history_dao->insert($user_id, 'youtube', $subscriber_count, null, 'subscriber_count');
// Calculate the time at which we should stop fetching videos
$end_time = time() + $this->max_crawl_time;
// Keep track of if we finished the crawl early due to timing out
$had_to_finish_early = false;
// Check if we already loaded all the old posts for this user
$archive_loaded = $instance->is_archive_loaded_posts;
// If the archive isn't loaded yet keep track of how many times we've tried to load it
if (!$archive_loaded) {
$attempts = $count_history_dao->getLatestCountByNetworkUserIDAndType($user_id, 'youtube', 'youtube_archive_attempts');
if ($attempts == null) {
// If this is the first crawler run
$attempts['count'] = 0;
}
$attempts['count']++;
$count_history_dao->insert($user_id, 'youtube', $attempts['count'], null, 'youtube_archive_attempts', null);
}
// Now page through their videos collecting the data
$videos_fields = array('part' => 'snippet', 'maxResults' => '25', 'playlistId' => $upload_id, 'pageToken' => null);
// We may get multiple pages
do {
// This is a page of IDs of videos the user has uploaded
$user_videos = $this->youtube_api_accessor->apiRequest('playlistItems', $this->access_token, $videos_fields);
// For each video store the relevant details about it
foreach ($user_videos->items as $video) {
// If we've hit the max crawl time stop
if (time() >= $end_time) {
$this->logger->logUserInfo("Stopping this service users crawl because it has exceeded max time of " . $this->max_crawl_time / 60 . " minute(s). ", __METHOD__ . ',' . __LINE__);
$had_to_finish_early = true;
break 2;
}
$video_id = $video->snippet->resourceId->videoId;
// Get the title, description, likes, dislikes, views, and details about where
// the video was taken from the data API
$video_fields = array('id' => $video_id, 'part' => 'statistics,id,snippet,recordingDetails,status');
$video_details = $this->youtube_api_accessor->apiRequest('videos', $this->access_token, $video_fields);
$item = $video_details->items[0];
// Check we haven't used up our quota
if (isset($video_details->error)) {
$this->logger->logError('Error querying YouTube Data API V3 ', __METHOD__ . ',' . __LINE__);
break;
}
$video_attributes['post_text'] = $item->snippet->title;
$video_attributes['description'] = $item->snippet->description;
$video_attributes['likes'] = $item->statistics->likeCount;
$video_attributes['dislikes'] = $item->statistics->dislikeCount;
$video_attributes['views'] = $item->statistics->viewCount;
// Keep track of these all time counts
$count_history_dao->insert($user_id, 'youtube', $video_attributes['likes'], $video_id, 'likes_all_time');
$count_history_dao->insert($user_id, 'youtube', $video_attributes['dislikes'], $video_id, 'dislikes_all_time');
$count_history_dao->insert($user_id, 'youtube', $video_attributes['views'], $video_id, 'views_all_time');
$video_attributes['pub_date'] = $item->snippet->publishedAt;
$video_attributes['post_id'] = $item->id;
$video_attributes['location'] = $item->recordingDetails->locationDescription;
$video_attributes['place'] = $item->recordingDetails->locationDescription;
if (isset($item->recordingDetails->latitude)) {
$video_attributes['geo'] = $item->recordingDetails->latitude . "," . $item->recordingDetails->longitude;
}
$video_attributes['is_protected'] = self::determinePrivacyStatus($item->status->privacyStatus);
$today = date('Y-m-d');
$upload_date = substr($item->snippet->publishedAt, 0, 10);
// Get the favourites added, favourites removed, shares, subscribers gained, subscribers lost
// estimated minuites watched, average view duration, average view percentage
$analytics_fields = array('ids' => 'channel==' . $channel_id, 'start-date' => $upload_date, 'end-date' => $today, 'metrics' => 'favoritesAdded,favoritesRemoved,shares,subscribersGained,subscribersLost,' . 'estimatedMinutesWatched,averageViewDuration,averageViewPercentage,views,likes,dislikes', 'filters' => 'video==' . $video_id);
//.........这里部分代码省略.........
示例10: processStream
//.........这里部分代码省略.........
$comment_id = explode("_", $c->id);
$comment_id = $comment_id[sizeof($comment_id) - 1];
//Get posts
$posts_to_process = array("post_id" => $comment_id, "author_username" => $c->from->name, "author_fullname" => $c->from->name, "author_avatar" => 'https://graph.facebook.com/' . $c->from->id . '/picture', "author_user_id" => $c->from->id, "post_text" => $c->message, "pub_date" => $c->created_time, "in_reply_to_user_id" => $profile->user_id, "in_reply_to_post_id" => $post_id, "source" => '', 'network' => $network, 'is_protected' => $is_protected, 'location' => '');
array_push($thinkup_posts, $posts_to_process);
}
}
$total_added_posts = $total_added_posts + $this->storePostsAndAuthors($thinkup_posts, "Posts stream comments collapsed");
//free up memory
$thinkup_posts = array();
if (isset($comments_stream->paging->next)) {
$api_call = str_replace('\\u00257C', '|', $comments_stream->paging->next);
}
} else {
// no comments (pun intended)
break;
}
} while (isset($comments_stream->paging->next));
}
}
}
//process "likes"
if ($must_process_likes) {
if (isset($p->likes)) {
$likes_captured = 0;
if (isset($p->likes->data)) {
$post_likes = $p->likes->data;
$post_likes_count = isset($post_likes) ? sizeof($post_likes) : 0;
if (is_array($post_likes) && sizeof($post_likes) > 0) {
foreach ($post_likes as $l) {
if (isset($l->name) && isset($l->id)) {
//Get users
$ttu = array("user_name" => $l->name, "full_name" => $l->name, "user_id" => $l->id, "avatar" => 'https://graph.facebook.com/' . $l->id . '/picture', "location" => '', "description" => '', "url" => '', "is_protected" => 1, "follower_count" => 0, "post_count" => 0, "joined" => '', "found_in" => "Likes", "network" => 'facebook');
//Users are always set to network=facebook
array_push($thinkup_users, $ttu);
$fav_to_add = array("favoriter_id" => $l->id, "network" => $network, "author_user_id" => $profile->user_id, "post_id" => $post_id);
array_push($thinkup_likes, $fav_to_add);
$likes_captured = $likes_captured + 1;
}
}
}
}
$total_added_users = $total_added_users + $this->storeUsers($thinkup_users, "Likes");
$total_added_likes = $total_added_likes + $this->storeLikes($thinkup_likes);
//free up memory
$thinkup_users = array();
$thinkup_likes = array();
// collapsed likes
if (isset($p->likes->count) && $p->likes->count > $likes_captured) {
$api_call = 'https://graph.facebook.com/' . $p->from->id . '_' . $post_id . '/likes?access_token=' . $this->access_token;
do {
$likes_stream = FacebookGraphAPIAccessor::rawApiRequest($api_call);
if (isset($likes_stream) && is_array($likes_stream->data)) {
foreach ($likes_stream->data as $l) {
if (isset($l->name) && isset($l->id)) {
//Get users
$ttu = array("user_name" => $l->name, "full_name" => $l->name, "user_id" => $l->id, "avatar" => 'https://graph.facebook.com/' . $l->id . '/picture', "location" => '', "description" => '', "url" => '', "is_protected" => 1, "follower_count" => 0, "post_count" => 0, "joined" => '', "found_in" => "Likes", "network" => 'facebook');
//Users are always set to network=facebook
array_push($thinkup_users, $ttu);
$fav_to_add = array("favoriter_id" => $l->id, "network" => $network, "author_user_id" => $p->from->id, "post_id" => $post_id);
array_push($thinkup_likes, $fav_to_add);
$likes_captured = $likes_captured + 1;
}
}
$total_added_users = $total_added_users + $this->storeUsers($thinkup_users, "Likes");
$total_added_likes = $total_added_likes + $this->storeLikes($thinkup_likes);
//free up memory
$thinkup_users = array();
$thinkup_likes = array();
if (isset($likes_stream->paging->next)) {
$api_call = str_replace('\\u00257C', '|', $likes_stream->paging->next);
}
} else {
// no likes
break;
}
} while (isset($likes_stream->paging->next));
}
}
//free up memory
$thinkup_users = array();
$thinkup_likes = array();
}
}
if ($total_added_posts > 0) {
$this->logger->logUserSuccess("Collected {$total_added_posts} posts", __METHOD__ . ',' . __LINE__);
} else {
$this->logger->logUserInfo("No new posts found.", __METHOD__ . ',' . __LINE__);
}
if ($total_added_users > 0) {
$this->logger->logUserSuccess("Collected {$total_added_users} users", __METHOD__ . ',' . __LINE__);
} else {
$this->logger->logUserInfo("No new users found.", __METHOD__ . ',' . __LINE__);
}
if ($total_added_likes > 0) {
$this->logger->logUserSuccess("Collected {$total_added_likes} likes", __METHOD__ . ',' . __LINE__);
} else {
$this->logger->logUserInfo("No new likes found.", __METHOD__ . ',' . __LINE__);
}
}
Example 11: fetchInstanceFavorites
/**
* This method, and the two supporting private methods 'maintFavsFetch' and 'archivingFavsFetch', provide the
* primary crawler functionality for adding the user's favorites to the database.
* For a given user, the process starts in 'archiving mode', by
* working forwards from the last (oldest) page of tweets to the newest. This archiving crawl
* is only done once. The crawler tries to do this all in one go, but if it exhausts the available API count,
* it will continue where it left off in the next run.
* Then, when page 1 is reached in archiving mode, the crawler goes into 'maintenance mode' and works
* backwards from then on. It first pages back until
* it has reached the last fav it previously processed. Then it searches back N more pages to catch any older
* tweets that were fav'd out of chronological order, where N is determined by favs_older_pages option.
 * The bookkeeping for these two crawler stages is maintained in the tu_instances entry for the user.
*
* Recently, the Twitter favorites API has developed some bugs that need to be worked around. The comments below
* provide more detail, but in a nutshell, these methods can not currently use information from Twitter to
* calculate loop termination (so a bit more work may be done than necessary), and do not currently remove un-fav'd
* tweets from the database. Hopefully these API issues will be fixed by Twitter in future.
*/
public function fetchInstanceFavorites() {
// first, check that we have the resources to do work
if (!($this->api->available && $this->api->available_api_calls_for_crawler)) {
$this->logger->logInfo("terminating fetchInstanceFavorites-- no API calls available",
__METHOD__.','.__LINE__);
return true;
}
$status_message = "";
//@TODO Can we get this from API?
$page_size = 20; // number of favs per page retrieved from the API call
$this->logger->logUserInfo("Checking for new favorites.", __METHOD__.','.__LINE__);
$last_favorites_count = $this->instance->favorites_profile;
$this->logger->logInfo("last favs count: $last_favorites_count", __METHOD__.','.__LINE__);
$last_page_fetched_favorites = $this->instance->last_page_fetched_favorites;
$last_fav_id = $this->instance->last_favorite_id;
$curr_favs_count = $this->user->favorites_count;
$this->logger->logInfo("curr favs count: $curr_favs_count", __METHOD__.','.__LINE__);
$last_page_of_favs = round($this->api->archive_limit / $page_size);
if ($last_page_fetched_favorites == "") {
$last_page_fetched_favorites = 0;
}
$this->logger->logInfo("got last_page_fetched_favorites: $last_page_fetched_favorites",
__METHOD__.','.__LINE__);
if ($last_fav_id == "") {
$last_fav_id = 0;
}
// the owner favs count, from twitter, is currently unreliable and may be less than the actual number of
// favs, by a large margin. So, we still go ahead and calculate the number of 'missing' tweets based on
// this info, but currently do not use it for fetch loop termination.
$this->logger->logInfo("owner favs: " . $this->user->favorites_count . ", instance owner favs in system: ".
$this->instance->owner_favs_in_system, __METHOD__.','.__LINE__);
$favs_missing = $this->user->favorites_count - $this->instance->owner_favs_in_system;
$this->logger->logInfo("favs missing: $favs_missing", __METHOD__.','.__LINE__);
// figure out if we're in 'archiving' or 'maintenance' mode, via # of last_page_fetched_favorites
$mode = 0; // default is archving/first-fetch
if ($last_page_fetched_favorites == 1) {
$mode = 1; // we are in maint. mode
$new_favs_to_add = $favs_missing;
$this->logger->logInfo("new favs to add/missing: $new_favs_to_add", __METHOD__.','.__LINE__);
$mpage = 1;
$starting_fav_id = $last_fav_id;
} else {
// we are in archiving mode.
$new_favs_to_add = $curr_favs_count - $last_favorites_count;
$this->logger->logInfo("new favs to add: $new_favs_to_add", __METHOD__.','.__LINE__);
// figure out start page based on where we left off last time, and how many favs added since then
$extra_pages = ceil($new_favs_to_add / $page_size);
$this->logger->logInfo("extra pages: $extra_pages", __METHOD__.','.__LINE__);
$finished_first_fetch = false;
if ($last_page_fetched_favorites == 0) {
// if at initial starting fetch (first time favs ever crawled)
$last_page_fetched_favs_start = $extra_pages + 1;
} else {
$last_page_fetched_favs_start = $last_page_fetched_favorites + $extra_pages;
}
if ($last_page_fetched_favs_start > $last_page_of_favs) {
$last_page_fetched_favs_start = $last_page_of_favs + 1;
}
}
$status_message = "total last favs count: $last_favorites_count" .
", last page fetched: $last_page_fetched_favorites, last fav id: $last_fav_id";
$this->logger->logInfo($status_message, __METHOD__.','.__LINE__);
$this->logger->logInfo("current favs count: $curr_favs_count" .
", new favs to add: $new_favs_to_add, last page of favs: $last_page_of_favs, mode: $mode",
__METHOD__.','.__LINE__);
$continue = true;
$fcount = 0;
$older_favs_smode = false;
$stop_page = 0;
$status_message = "in fetchInstanceFavorites: API available: ".$this->api->available.", avail for crawler: ".
$this->api->available_api_calls_for_crawler;
    //......... remainder of this method omitted here .........