This article collects typical usage examples of the Python method mozprofile.FirefoxProfile.cleanup. If you have been wondering what FirefoxProfile.cleanup does, how to call it, or what real-world code using it looks like, the selected examples below should help. You can also read further about the containing class, mozprofile.FirefoxProfile.
Two code examples of FirefoxProfile.cleanup are shown below, sorted by popularity by default.
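Before the full examples, here is a minimal sketch of the pattern both of them follow: construct a temporary FirefoxProfile and call cleanup() in a finally block so the on-disk profile directory is always removed, even if something goes wrong in between. The preference value used here is purely illustrative.

from mozprofile import FirefoxProfile

# Create a throwaway profile; mozprofile writes it to a temporary directory.
profile = FirefoxProfile(preferences={"browser.startup.homepage": "about:blank"})
try:
    print(profile.profile)  # path of the temporary profile directory
finally:
    profile.cleanup()       # delete the temporary profile directory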
Example 1: run
# Required import: from mozprofile import FirefoxProfile [as alias]
# Or: from mozprofile.FirefoxProfile import cleanup [as alias]
# Note: this is Python 2 code (print statements, urllib.urlopen). Besides the
# import above, the snippet uses argparse, os, random, tempfile, threading and
# urllib, as well as FirefoxRunner (from mozrunner) and make_server (from
# wsgiref.simple_server); these, like CrawlerApp, are presumably imported or
# defined elsewhere in the original script.
def run():
    parser = argparse.ArgumentParser(description="Run crawler")
    parser.add_argument("-b", "--binary", type=str, help="path to the Firefox binary")
    parser.add_argument("-a", "--abpdir", type=str, help="path to the Adblock Plus repository")
    parser.add_argument(
        "-f",
        "--filters",
        metavar="url",
        type=str,
        nargs="+",
        default=[
            "https://easylist-downloads.adblockplus.org/easylist.txt",
            "https://easylist-downloads.adblockplus.org/exceptionrules.txt",
        ],
        help="filter lists to install in Adblock Plus. The arguments can also have the format path=url, the data will be read from the specified path then.",
    )
    parser.add_argument("-t", "--timeout", type=int, default=300, help="Load timeout (seconds)")
    parser.add_argument("-x", "--maxtabs", type=int, default=15, help="Maximal number of tabs to open in parallel")
    parser.add_argument("list", type=str, help="URL list to process")
    parser.add_argument("outdir", type=str, help="directory to write data into")
    parameters = parser.parse_args()

    import buildtools.packagerGecko as packager
    cleanup = []
    try:
        # Build the crawler extension into a temporary XPI file.
        base_dir = os.path.dirname(os.path.abspath(__file__))
        handle, crawlerxpi = tempfile.mkstemp(suffix=".xpi")
        os.close(handle)
        cleanup.append(crawlerxpi)
        packager.createBuild(base_dir, outFile=crawlerxpi, releaseBuild=True)

        # Use the released Adblock Plus build unless a local repository is given.
        abpxpi = "https://addons.mozilla.org/firefox/downloads/latest/1865/addon-1865-latest.xpi"
        if parameters.abpdir:
            handle, abpxpi = tempfile.mkstemp(suffix=".xpi")
            os.close(handle)
            cleanup.append(abpxpi)
            packager.createBuild(parameters.abpdir, outFile=abpxpi, releaseBuild=True)

        # Create a temporary profile with both extensions preinstalled.
        profile = FirefoxProfile(
            addons=[crawlerxpi, abpxpi],
            preferences={
                "browser.startup.homepage": "about:blank",
                "browser.tabs.warnOnCloseOtherTabs": False,
                "browser.uitour.enabled": False,
                "prompts.tab_modal.enabled": False,
                "startup.homepage_welcome_url": "about:blank",
                "startup.homepage_welcome_url.additional": "about:blank",
                "xpinstall.signatures.required": False,
            },
        )

        # Write the Adblock Plus filter subscriptions directly into the profile.
        abpsettings = os.path.join(profile.profile, "adblockplus")
        os.makedirs(abpsettings)
        with open(os.path.join(abpsettings, "patterns.ini"), "w") as handle:
            print >> handle, "# Adblock Plus preferences"
            print >> handle, "version=4"
            for url in parameters.filters:
                if "=" in url:
                    path, url = url.split("=", 1)
                    with open(path, "r") as source:
                        data = source.read()
                else:
                    data = urllib.urlopen(url).read()
                print >> handle, "[Subscription]"
                print >> handle, "url=%s" % url
                print >> handle, "[Subscription filters]"
                print >> handle, "\n".join(data.splitlines()[1:])
    finally:
        # The XPI files are no longer needed once the profile has been set up.
        for path in cleanup:
            os.unlink(path)

    server = None
    try:
        # Start a local WSGI server that the crawler extension reports back to.
        port = random.randrange(2000, 60000)
        print "Communicating with client on port %i" % port
        app = CrawlerApp(parameters)
        server = make_server("localhost", port, app)
        app.server = server
        threading.Thread(target=lambda: server.serve_forever()).start()
        runner = FirefoxRunner(
            profile=profile,
            binary=parameters.binary,
            cmdargs=["--crawler-port", str(port)],
            env=dict(os.environ, MOZ_CRASHREPORTER_DISABLE="1"),
        )
        # Restart Firefox until all URLs in the list have been processed.
        while app.urls:
            runner.start()
            runner.wait()
    finally:
        if server:
            server.shutdown()
        # Remove the temporary profile directory.
        profile.cleanup()
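Note how profile.cleanup() sits in the outer finally block: Firefox may be restarted many times inside the while app.urls loop, but the temporary profile directory is only removed once the crawl and the local server have shut down. As a side note, recent mozprofile releases also let a profile object be used as a context manager, which calls cleanup() automatically on exit; a minimal sketch, assuming such a version is installed:

from mozprofile import FirefoxProfile

# Assumes a mozprofile version where Profile implements __enter__/__exit__,
# with __exit__ calling cleanup().
with FirefoxProfile(preferences={"browser.uitour.enabled": False}) as profile:
    print(profile.profile)  # the temporary profile directory, still on disk here
# Leaving the with-block triggered cleanup(), so the directory is gone now.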
Example 2: run
# Required import: from mozprofile import FirefoxProfile [as alias]
# Or: from mozprofile.FirefoxProfile import cleanup [as alias]
def run():
    parser = argparse.ArgumentParser(description='Run crawler')
    parser.add_argument(
        '-b', '--binary', type=str,
        help='path to the Firefox binary'
    )
    parser.add_argument(
        '-a', '--abpdir', type=str,
        help='path to the Adblock Plus repository'
    )
    parser.add_argument(
        '-f', '--filters', metavar='url', type=str, nargs='+',
        default=["https://easylist-downloads.adblockplus.org/easylist.txt", "https://easylist-downloads.adblockplus.org/exceptionrules.txt"],
        help='filter lists to install in Adblock Plus. The arguments can also have the format path=url, the data will be read from the specified path then.'
    )
    parser.add_argument(
        '-t', '--timeout', type=int, default=300,
        help='Load timeout (seconds)'
    )
    parser.add_argument(
        '-x', '--maxtabs', type=int, default=15,
        help='Maximal number of tabs to open in parallel'
    )
    parser.add_argument(
        'list', type=str,
        help='URL list to process'
    )
    parser.add_argument(
        'outdir', type=str,
        help='directory to write data into'
    )
    parameters = parser.parse_args()

    import buildtools.packagerGecko as packager
    cleanup = []
    try:
        base_dir = os.path.dirname(__file__)
        handle, crawlerxpi = tempfile.mkstemp(suffix='.xpi')
        os.close(handle)
        cleanup.append(crawlerxpi)
        packager.createBuild(base_dir, outFile=crawlerxpi, releaseBuild=True)

        abpxpi = 'https://addons.mozilla.org/firefox/downloads/latest/1865/addon-1865-latest.xpi'
        if parameters.abpdir:
            handle, abpxpi = tempfile.mkstemp(suffix='.xpi')
            os.close(handle)
            cleanup.append(abpxpi)
            packager.createBuild(parameters.abpdir, outFile=abpxpi, releaseBuild=True)

        profile = FirefoxProfile(
            addons=[
                crawlerxpi,
                abpxpi,
            ],
            preferences={
                'browser.uitour.enabled': False,
                'prompts.tab_modal.enabled': False,
            }
        )

        abpsettings = os.path.join(profile.profile, 'adblockplus')
        os.makedirs(abpsettings)
        with open(os.path.join(abpsettings, 'patterns.ini'), 'w') as handle:
            print >>handle, '# Adblock Plus preferences'
            print >>handle, 'version=4'
            for url in parameters.filters:
                if '=' in url:
                    path, url = url.split('=', 1)
                    with open(path, 'r') as source:
                        data = source.read()
                else:
                    data = urllib.urlopen(url).read()
                print >>handle, '[Subscription]'
                print >>handle, 'url=%s' % url
                print >>handle, '[Subscription filters]'
                print >>handle, '\n'.join(data.splitlines()[1:])
    finally:
        for path in cleanup:
            os.unlink(path)

    server = None
    try:
        port = random.randrange(2000, 60000)
        print "Communicating with client on port %i" % port
        app = CrawlerApp(parameters)
        server = make_server('localhost', port, app)
        app.server = server
        threading.Thread(target=lambda: server.serve_forever()).start()
        runner = FirefoxRunner(
            profile=profile,
            binary=parameters.binary,
            cmdargs=['--crawler-port', str(port)],
            env=dict(os.environ, MOZ_CRASHREPORTER_DISABLE='1'),
        )
        while app.urls:
            runner.start()
            runner.wait()
    finally:
        #......... remainder of the code omitted here .........