本文整理汇总了Python中request.Request.get_response方法的典型用法代码示例。如果您正苦于以下问题:Python Request.get_response方法的具体用法?Python Request.get_response怎么用?Python Request.get_response使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类request.Request的用法示例。
在下文中一共展示了Request.get_response方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: friend_pages
# 需要导入模块: from request import Request [as 别名]
# 或者: from request.Request import get_response [as 别名]
def friend_pages(self):
    """Return the friend list's total page count for ``self.core_uid``.

    Fills the curpage/uid placeholders in ``self.url``, fetches the page
    and extracts the index of the "last page" anchor.

    Returns:
        int: the last page index, or None when the response is empty or
        the page could not be parsed (details are appended to error.log).
    """
    # BUG FIX: str.format returns a new string -- the original discarded
    # the result, so the *unformatted* template URL (still containing the
    # {0}/{1} placeholders) was requested.
    self.url = self.url.format(self.curpage, self.core_uid)
    http_request = Request(self.url)
    rsp_src = http_request.get_response()
    # An empty response cannot be parsed; log and bail out early.
    # BUG FIX: the original used `assert` (stripped under -O) and wrote to
    # an `error_log` name that was never opened in this scope (NameError).
    if not rsp_src:
        with open(r"./error.log", 'a+') as error_log:
            print("http_request.get_response() failed and returned nothing, "
                  "Check your network and cookie",
                  file=error_log)
        return
    # Parse the page and locate the "last page" link to get the page count.
    soup = BeautifulSoup(rsp_src)
    # NOTE(review): `unicode` implies Python 2 here -- confirm interpreter.
    text = str(soup.findAll("a", attrs={"title": unicode("最后页", "utf-8")}))
    pattern = "curpage=[0-9]+"
    r = re.search(pattern, text)
    try:
        # len("curpage=") == 8: slice out just the trailing digits.
        result = int(text[r.start() + 8: r.end()])
    except AttributeError:
        # r is None when the pattern is absent -- dump context for debugging.
        with open(r"./error.log", 'a+') as error_log:
            print("except AttributeError, re.search fail:", file=error_log)
            print("soup: ", soup, file=error_log)
            print("pattern: ", pattern, file=error_log)
            print("text: ", text, file=error_log)
            print("r: ", r, '\n', file=error_log)
        return
    return result
示例2: len
# 需要导入模块: from request import Request [as 别名]
# 或者: from request.Request import get_response [as 别名]
import sys
from logger import *
from request import Request

if __name__ == "__main__":
    # Exactly two user arguments are required: <id> <request-target>.
    if len(sys.argv) != 3:
        # use the syslog utility instead of stderr
        log_error(IVK_WRONG_NUM_ARGS)
        # BUG FIX: the original fell through and indexed sys.argv anyway,
        # raising IndexError after logging the usage error.
        sys.exit(1)
    try:
        request = Request(sys.argv[2])
        response = request.get_response()
    except Exception as e:
        # BUG FIX: `e.message` does not exist on Python 3 (and is
        # deprecated on Python 2); use str(e) instead.
        log_error(ERR_EXCEPTION, str(e))
        # BUG FIX: without exiting, `response` is unbound below.
        sys.exit(1)
    # Print-function form works identically on Python 2 and 3 for one arg.
    print(response.toJSON(int(sys.argv[1])))
示例3: parse_friends
# 需要导入模块: from request import Request [as 别名]
# 或者: from request.Request import get_response [as 别名]
def parse_friends(self, cur_hop, buffer_circle):
    """Parse every friend-list page and merge the friends into buffer_circle.

    Args:
        cur_hop: hop distance of ``self.core_uid`` from the crawl root;
            newly discovered friends are recorded at ``cur_hop + 1``.
        buffer_circle: dict mapping uid -> userinfo dict with keys
            "name", "network_class", "network_name", "friends" (set of
            uids) and "hop"; updated in place. Must already contain
            ``self.core_uid``.

    Side effects: appends parse failures to ./error.log, persists each
    userinfo to ./circle.db, and prints debug output to stdout.
    """
    pages = self.friend_pages()
    # friend_pages returns None on failure; fall back to a single page.
    # (Clearer than the original `[pages, 0][pages == None]` trick, which
    # also compared against None with `==` instead of `is`.)
    pages = 0 if pages is None else pages
    for self.curpage in range(0, pages + 1):
        self.url = ("http://friend.renren.com/GetFriendList.do?"
                    "curpage={0}&id={1}"
                    .format(self.curpage, self.core_uid))
        http_request = Request(self.url)
        rsp_src = http_request.get_response()
        soup = BeautifulSoup(rsp_src)
        friends_list_divs = soup.findAll("div", attrs={"class": "info"})
        for dl in friends_list_divs:
            try:
                # Fetch uid as int; the href embeds it at offset 36.
                uid = int(dl.dd.a["href"][36:])
                # Coerce NavigableString to plain str so it can be shelved.
                name = str(dl.dd.a.string)
                network_class = str(dl.findAll("dt")[1].string)
                network_name = str(dl.findAll("dd")[1].string)
            except AttributeError:
                with open(r"./error.log", 'a+') as error_log:
                    print("except AttributeError, fail to parse dl:",
                          file=error_log)
                # BUG FIX: the original fell through after a failed parse
                # and used unbound (or stale previous-iteration) values.
                continue
            userinfo = {}
            userinfo["name"] = name
            userinfo["network_class"] = network_class
            userinfo["network_name"] = network_name
            if uid not in buffer_circle:
                # First sighting: empty friend set, one hop further out.
                userinfo["friends"] = set([])
                userinfo["hop"] = cur_hop + 1
            else:
                # Already known: keep the existing friends/hop data.
                userinfo["friends"] = buffer_circle[uid]["friends"]
                userinfo["hop"] = buffer_circle[uid]["hop"]
            buffer_circle[uid] = userinfo
            # Link both directions: child to parent and parent to child.
            buffer_circle[self.core_uid]["friends"].add(uid)
            buffer_circle[uid]["friends"].add(self.core_uid)
            # NOTE(review): pickling inside shelve double-serializes the
            # value; kept as-is since circle.db readers expect pickled
            # bytes -- confirm before simplifying.
            with closing(shelve.open('./circle.db', writeback=True)) as s:
                s[str(uid)] = pickle.dumps(userinfo)
            #!!! For Debug
            print(self.core_uid, end=',')
            print(uid, end=',')
            print(buffer_circle[uid]["name"], end=',')
            print(buffer_circle[uid]["network_class"], end=',')
            print(buffer_circle[uid]["network_name"], end=',')
            print(buffer_circle[uid]["hop"], end=',')
            # NOTE(review): __sizeof__ is the set's memory footprint, not
            # its length -- len(...) was probably intended; confirm.
            print(buffer_circle[uid]["friends"].__sizeof__())