本文整理汇总了Python中utils.utils.Utils.remove_file方法的典型用法代码示例。如果您正苦于以下问题:Python Utils.remove_file方法的具体用法?Python Utils.remove_file怎么用?Python Utils.remove_file使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类utils.utils.Utils
的用法示例。
在下文中一共展示了Utils.remove_file方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: _walk_dir
# 需要导入模块: from utils.utils import Utils [as 别名]
# 或者: from utils.utils.Utils import remove_file [as 别名]
def _walk_dir(self, dir, path_out):
    """Walk *dir* recursively: extract archives, keep APKs, delete the rest.

    For every file found: zip/rar/tar archives are extracted in place and
    then removed; APK files are moved to *path_out* via Utils.rename_file;
    any other file is removed.

    :param dir: root directory to walk.
    :param path_out: destination directory for APK files.
    :raises OSError: re-raised (with original traceback) if the walk fails.
    """
    try:
        for root, dirs, files in os.walk(dir):
            for fname in files:
                # NOTE(review): the original called
                # fname.replace('$', '\\$') and discarded the result — a
                # no-op, since str.replace returns a new string. The dead
                # call is removed; if '$' must be escaped for a shell-based
                # helper, do it where the name reaches the shell — TODO
                # confirm intent.
                if Utils.is_zip(root, fname) or \
                   Utils.is_rar(root, fname) or \
                   Utils.is_tar(root, fname):
                    self._extract_file(root, fname)
                    Utils.remove_file(root, fname)
                elif Utils.is_apk(root, fname):
                    Utils.rename_file(root, path_out, fname)
                else:
                    Utils.remove_file(root, fname)
    except OSError as e:
        logging.error("Error walking dir '%s': %s" % (dir, e))
        # Bare 'raise' preserves the caught exception and its traceback;
        # the original 'raise OSError' raised a fresh, empty exception.
        raise
示例2: _download_phishtank
# 需要导入模块: from utils.utils import Utils [as 别名]
# 或者: from utils.utils.Utils import remove_file [as 别名]
def _download_phishtank(self, path_in, user_agent, url_feed):
    """Download the PhishTank feed and store it decompressed as text.

    Fetches *url_feed* using *user_agent*, writes the gzipped payload to
    ``path_in/phishtank.gz``, decompresses it, deletes the intermediate
    .gz file, and writes the plain data to ``path_in/phishtank.txt``.

    :param path_in: directory where the feed files are written.
    :param user_agent: User-agent header value for the request.
    :param url_feed: URL of the gzipped PhishTank feed.
    :raises Exception: re-raised after logging if any step fails.
    """
    logging.debug("Downloading PhishTank feeds...")
    try:
        request = urllib2.Request(url_feed)
        opener = urllib2.build_opener()
        opener.addheaders = [('User-agent', user_agent)]
        handler = opener.open(request)
        data = handler.read()
        path_gz = os.path.join(path_in, 'phishtank.gz')
        with open(path_gz, "wb") as file_gz:
            file_gz.write(data)
        # Context manager closes the gzip handle even if read() raises;
        # the original leaked the handle on the error path.
        with gzip.GzipFile(path_gz, 'rb') as file_gz:
            data = file_gz.read()
        # BUGFIX: the .gz was created under path_in but removed from
        # self._cfg['dir_in']; use path_in consistently so the temp file
        # is always cleaned up.
        Utils.remove_file(path_in, 'phishtank.gz')
        # open() instead of the deprecated file() builtin; 'with' replaces
        # the manual close.
        with open(os.path.join(path_in, 'phishtank.txt'), 'wb') as file_txt:
            file_txt.write(data)
    except Exception as e:
        logging.error("Error downloading PhishTank feed '%s': %s" % (url_feed, e))
        # Bare 'raise' re-raises the caught exception; the original raised
        # the bare Exception class, losing all error detail.
        raise
示例3: _download_cleanmx
# 需要导入模块: from utils.utils import Utils [as 别名]
# 或者: from utils.utils.Utils import remove_file [as 别名]
def _download_cleanmx(self, path_in, user_agent, url_feed):
    """Download the CleanMX XML feed and store it converted to JSON.

    Fetches *url_feed* using *user_agent*, writes the raw XML payload to
    ``path_in/cleanmx.xml``, deletes that intermediate file, then writes
    the feed as JSON (via xmltodict) to ``path_in/cleanmx.txt``.

    :param path_in: directory where the feed files are written.
    :param user_agent: User-agent header value for the request.
    :param url_feed: URL of the CleanMX XML feed.
    :raises Exception: re-raised after logging if any step fails.
    """
    logging.debug("Downloading CleanMX feeds...")
    try:
        request = urllib2.Request(url_feed)
        opener = urllib2.build_opener()
        opener.addheaders = [('User-agent', user_agent)]
        handler = opener.open(request)
        data = handler.read()
        with open(os.path.join(path_in, 'cleanmx.xml'), "wb") as file_xml:
            file_xml.write(data)
        # The original reopened cleanmx.xml and read it back, but 'data'
        # already holds exactly what was written — the redundant re-read
        # is dropped.
        # BUGFIX: the .xml was created under path_in but removed from
        # self._cfg['dir_in']; use path_in consistently so the temp file
        # is always cleaned up.
        Utils.remove_file(path_in, 'cleanmx.xml')
        # open() instead of the deprecated file() builtin; 'with' replaces
        # the manual close.
        with open(os.path.join(path_in, 'cleanmx.txt'), 'w') as file_txt:
            file_txt.write(json.dumps(xmltodict.parse(data)))
    except Exception as e:
        logging.error("Error downloading CleanMX feed '%s': %s" % (url_feed, e))
        # Bare 'raise' re-raises the caught exception; the original raised
        # the bare Exception class, losing all error detail.
        raise