本文整理汇总了Python中lib.cuckoo.core.database.Database类的典型用法代码示例。如果您正苦于以下问题:Python Database类的具体用法?Python Database怎么用?Python Database使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Database类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: autoprocess
def autoprocess(parallel=1):
    """Continuously process completed analysis tasks with a worker pool.

    Polls the database for tasks in TASK_COMPLETED state and dispatches
    report generation to a multiprocessing pool, keeping at most
    ``parallel`` tasks in flight at a time.

    @param parallel: number of pool workers / concurrent tasks.
    """
    maxcount = cfg.cuckoo.max_analysis_count
    count = 0
    db = Database()
    pool = multiprocessing.Pool(parallel)
    pending_results = []

    # CAUTION - big ugly loop ahead.
    # maxcount == 0 means "run forever".
    while count < maxcount or not maxcount:
        # Pending_results maintenance: reap finished workers.
        for ar, tid, target, copy_path in list(pending_results):
            if ar.ready():
                if ar.successful():
                    log.info("Task #%d: reports generation completed", tid)
                else:
                    try:
                        # AsyncResult.get() re-raises the worker's exception.
                        ar.get()
                    except Exception:
                        # Narrowed from a bare except: still log any worker
                        # failure and mark the task failed, but let
                        # SystemExit/KeyboardInterrupt propagate.
                        log.exception("Exception when processing task ID %u.", tid)
                        db.set_status(tid, TASK_FAILED_PROCESSING)

                pending_results.remove((ar, tid, target, copy_path))

        # If still full, don't add more (necessary despite pool).
        if len(pending_results) >= parallel:
            time.sleep(1)
            continue

        # If we're here, getting parallel tasks should at least
        # have one we don't know.
        tasks = db.list_tasks(status=TASK_COMPLETED, limit=parallel,
                              order_by="completed_on asc")

        # For loop to add only one, nice.
        for task in tasks:
            # Not-so-efficient lock: skip tasks already in flight.
            if task.id in [tid for ar, tid, target, copy_path
                           in pending_results]:
                continue

            log.info("Processing analysis data for Task #%d", task.id)
            sample = db.view_sample(task.sample_id)
            copy_path = os.path.join(CUCKOO_ROOT, "storage",
                                     "binaries", sample.sha256)

            args = task.id, task.target, copy_path
            kwargs = dict(report=True, auto=True)
            result = pool.apply_async(process, args, kwargs)
            pending_results.append((result, task.id, task.target, copy_path))

            count += 1
            break

        # If there wasn't anything to add, sleep tight.
        if not tasks:
            time.sleep(5)
示例2: main
def main():
    """Command-line entry point: submit a single file for analysis.

    @return: False on any error; None on successful submission.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("path", type=str, help="Path to the file to analyze")
    parser.add_argument("--package", type=str, action="store", default="", help="Specify an analysis package", required=False)
    parser.add_argument("--custom", type=str, action="store", default="", help="Specify any custom value", required=False)
    parser.add_argument("--timeout", type=int, action="store", default=0, help="Specify an analysis timeout", required=False)
    parser.add_argument("--options", type=str, action="store", default="", help="Specify options for the analysis package (e.g. \"name=value,name2=value2\")", required=False)
    parser.add_argument("--priority", type=int, action="store", default=1, help="Specify a priority for the analysis represented by an integer", required=False)
    parser.add_argument("--machine", type=str, action="store", default="", help="Specify the identifier of a machine you want to use", required=False)
    parser.add_argument("--platform", type=str, action="store", default="", help="Specify the operating system platform you want to use (windows/darwin/linux)", required=False)

    try:
        args = parser.parse_args()
    except IOError as e:
        parser.error(e)
        return False

    if not os.path.exists(args.path):
        print("ERROR: the specified file does not exist at path \"%s\"" % args.path)
        return False

    db = Database()
    task_id = db.add(file_path=args.path,
                     md5=File(args.path).get_md5(),
                     package=args.package,
                     timeout=args.timeout,
                     options=args.options,
                     priority=args.priority,
                     machine=args.machine,
                     platform=args.platform,
                     custom=args.custom)

    # db.add() returns a falsy value on failure; without this guard the
    # success message below would crash trying to format None with %d.
    if not task_id:
        print("ERROR: failed to add task for \"%s\"" % args.path)
        return False

    print("SUCCESS: Task added with id %d" % task_id)
示例3: analysis_view
def analysis_view(a_id):
    """Render the browse page for the analysis with the given id."""
    database = Database()
    analysis_rows = database.get_analysis(a_id)
    page = lookup.get_template("browse.html")
    render_context = {"cuckoo_root": CUCKOO_ROOT}
    return page.render(os=os, rows=analysis_rows, **render_context)
示例4: cuckoo_clean_failed_tasks
def cuckoo_clean_failed_tasks():
    """Clean up failed tasks.

    It deletes all stored data from file system and configured databases
    (SQL and MongoDB) for failed tasks.
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Initialize the database connection.
    db = Database()

    # Check if MongoDB reporting is enabled and drop that if it is.
    cfg = Config("reporting")
    if cfg.mongodb and cfg.mongodb.enabled:
        from pymongo import MongoClient
        host = cfg.mongodb.get("host", "127.0.0.1")
        port = cfg.mongodb.get("port", 27017)
        mdb = cfg.mongodb.get("db", "cuckoo")
        try:
            results_db = MongoClient(host, port)[mdb]
        except Exception:
            # Narrowed from a bare except; if MongoDB is unreachable we
            # bail out entirely rather than delete only the SQL side.
            log.warning("Unable to connect to MongoDB database: %s", mdb)
            return

    # Collect tasks in every failure-related state and remove them.
    failed_tasks_a = db.list_tasks(status=TASK_FAILED_ANALYSIS)
    failed_tasks_p = db.list_tasks(status=TASK_FAILED_PROCESSING)
    failed_tasks_r = db.list_tasks(status=TASK_FAILED_REPORTING)
    failed_tasks_rc = db.list_tasks(status=TASK_RECOVERED)
    for failed in (failed_tasks_a, failed_tasks_p, failed_tasks_r, failed_tasks_rc):
        for task in failed:
            remove_task(task.to_dict()["id"])
示例5: main
def main():
    """Command-line entry point: register a virtual machine with Cuckoo."""
    parser = argparse.ArgumentParser()
    parser.add_argument("vmname", type=str, help="Name of the Virtual Machine.")
    parser.add_argument("--debug", action="store_true", help="Debug log in case of errors.")
    parser.add_argument("--add", action="store_true", help="Add a Virtual Machine.")
    parser.add_argument("--ip", type=str, help="Static IP Address.")
    parser.add_argument("--platform", type=str, default="windows", help="Guest Operating System.")
    parser.add_argument("--tags", type=str, help="Tags for this Virtual Machine.")
    parser.add_argument("--interface", type=str, help="Sniffer interface for this machine.")
    parser.add_argument("--snapshot", type=str, help="Specific Virtual Machine Snapshot to use.")
    parser.add_argument("--resultserver", type=str, help="IP:Port of the Result Server.")
    args = parser.parse_args()

    logging.basicConfig()
    log = logging.getLogger()

    if args.debug:
        log.setLevel(logging.DEBUG)

    db = Database()

    # BUGFIX: always load the configuration. update_conf() below needs
    # conf.cuckoo.machinery even when --resultserver overrides the ip/port;
    # previously `conf` was only bound in the else branch -> NameError.
    conf = Config()
    if args.resultserver:
        resultserver_ip, resultserver_port = args.resultserver.split(":")
    else:
        resultserver_ip = conf.resultserver.ip
        resultserver_port = conf.resultserver.port

    if args.add:
        db.add_machine(args.vmname, args.vmname, args.ip, args.platform,
                       args.tags, args.interface, args.snapshot,
                       resultserver_ip, int(resultserver_port))
        db.unlock_machine(args.vmname)
        update_conf(conf.cuckoo.machinery, args)
示例6: start
def start(request, task_id):
    """Force the given task to be scheduled immediately."""
    Database().start_task(task_id)
    message = {"message": "Task scheduled for NOW, thanks for all the fish."}
    return render_to_response("success.html",
                              message,
                              context_instance=RequestContext(request))
示例7: attempt_to_start_analysis
def attempt_to_start_analysis(binary):
    """Submit a binary for sandbox analysis unless a task for it exists.

    Writes the binary's data to /tmp/<name> and adds an analysis task for
    that path; a no-op if any existing task already targets the same path.

    @param binary: object exposing ``file_path`` and ``data`` attributes.
    @raise Exception: if the task could not be added to the database.
    """
    # Converted from Python-2 `print 'x'` statements to print() calls for
    # consistency with the rest of the file (same output either way).
    print('starting ana')
    db = Database()
    tasks = db.list_tasks()
    filename = ntpath.basename(binary.file_path)
    output = ntpath.join('/tmp/', filename)

    # Skip submission if any existing task already targets this path.
    for task in tasks:
        if task.to_dict()['target'] == output:
            return

    with open(output, "wb") as handle:
        handle.write(binary.data)

    task_id = db.add_path(file_path=output,
                          package="",
                          timeout=120,
                          options="",
                          priority=1,
                          machine="",
                          custom="",
                          memory=False,
                          enforce_timeout=False,
                          tags=None)
    if not task_id:
        # Leftover debug output, kept for behavior parity.
        print('asd')
        err = "Failed adding sandbox analysis for %s" % filename
        raise Exception(err)
示例8: init_config
def init_config(override=True):
    """Read configuration from the configuration files and update each entry
    in the database."""
    db = Database()
    log.debug("Initializing configuration..")
    stored = db.config_all()

    # With override disabled, an already-populated config wins.
    if not override and stored:
        return

    _missing = object()  # sentinel distinguishing "absent" from None

    conf_dir = os.path.join(CUCKOO_ROOT, "conf")
    for fname in os.listdir(conf_dir):
        basename, ext = os.path.splitext(fname)
        if ext != ".conf":
            continue

        cfg = Config(basename)
        for section, values in cfg.sections.items():
            for key, value in values.items():
                attr = "%s.%s.%s" % (basename, section, key)
                # Only touch entries whose stored value actually differs.
                if stored.get(attr, _missing) == value:
                    continue

                log.debug("Updating configuration %s to '%s' (from '%s')",
                          attr, value, stored.get(attr, ''))
                db.config_set(attr, value)
示例9: store_and_submit_fileobj
def store_and_submit_fileobj(fobj, filename, package="", options="", timeout=0,
                             priority=1, machine="", platform=""):
    """Persist an uploaded file object to disk and submit it to Cuckoo.

    The upload is stored under <tmpdir>/TMPSUBDIR/upload_*/<filename> and
    its MD5 is computed while streaming it to disk.

    @param fobj: readable file-like object containing the upload.
    @param filename: original file name to store the upload under.
    @return: the id of the newly created task.
    """
    # Do everything in tmppath/TMPSUBDIR.
    tmppath = tempfile.gettempdir()
    targetpath = os.path.join(tmppath, TMPSUBDIR)
    if not os.path.exists(targetpath):
        os.mkdir(targetpath)

    # Upload will be stored in a tmpdir with the original name.
    tmpdir = tempfile.mkdtemp(prefix="upload_", dir=targetpath)
    store_path = os.path.join(tmpdir, filename)

    # Copy the stream chunk-wise, hashing as we go. The context manager
    # guarantees the file is closed even if fobj.read() raises mid-copy
    # (the original open()/close() pair leaked the handle on error).
    md5h = hashlib.md5()
    with open(store_path, "wb") as tmpf:
        chunk = fobj.read(BUFSIZE)
        while chunk:
            md5h.update(chunk)
            tmpf.write(chunk)
            chunk = fobj.read(BUFSIZE)

    # Submit task to cuckoo db.
    db = Database()
    task_id = db.add(file_path=store_path,
                     md5=md5h.hexdigest(),
                     package=package,
                     timeout=timeout,
                     options=options,
                     priority=priority,
                     machine=machine,
                     platform=platform)
    return task_id
示例10: main
def main():
    """Command-line entry point for (re)processing analysis results."""
    parser = argparse.ArgumentParser()
    parser.add_argument("id", type=str, help="ID of the analysis to process (auto for continuous processing of unprocessed tasks).")
    parser.add_argument("-d", "--debug", help="Display debug messages", action="store_true", required=False)
    parser.add_argument("-r", "--report", help="Re-generate report", action="store_true", required=False)
    parser.add_argument("-p", "--parallel", help="Number of parallel threads to use (auto mode only).", type=int, required=False, default=1)
    parser.add_argument("-u", "--user", type=str, help="Drop user privileges to this user")
    parser.add_argument("-m", "--modules", help="Path to signature and reporting modules - overrides default modules path.", type=str, required=False)
    args = parser.parse_args()

    if args.user:
        drop_privileges(args.user)
    if args.debug:
        log.setLevel(logging.DEBUG)
    if args.modules:
        sys.path.insert(0, args.modules)

    init_modules(machinery=False)

    if args.id == "auto":
        autoprocess(parallel=args.parallel)
        return

    task_id = int(args.id)
    task = Database().view_task(task_id)
    if task:
        process(task=task.to_dict(), report=args.report)
    else:
        # No DB record for this id: process a synthetic file task instead.
        process(task={"id": task_id, "category": "file", "target": ""},
                report=args.report)
示例11: main
def main():
    """main function for standalone usage"""
    usage = "usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option('-a', '--archive-toplevel-dir', default='/mnt/cuckoo_archive',
                      help='Archive top-level directory [default: %default]')
    parser.add_option('-m', '--local-machine-dir', default=socket.gethostname(),
                      help='Machine-specific directory [default: $HOST]')
    options, args = parser.parse_args()

    # This tool takes no positional arguments.
    if args:
        parser.print_help()
        return 2

    # Ensure the per-machine archive directory exists.
    archive_dir = os.path.join(options.archive_toplevel_dir,
                               options.local_machine_dir)
    try:
        os.mkdir(archive_dir)
    except OSError:
        pass  # already exists

    # Move every reported analysis into the archive and leave a symlink
    # behind so existing paths keep working.
    db = Database()
    for task in db.list_tasks(status=TASK_REPORTED):
        src = _analysis_dir(task.id)
        if os.path.islink(src):
            continue  # already archived on a previous run
        dst = os.path.join(archive_dir, str(task.id))
        move(src, dst)
        os.symlink(dst, src)
        print(bold(green('Successfully')) + ' archived %s' % dst)
示例12: show_reports
def show_reports(request, binary_sha1):
    """List up to 50 file-analysis reports for the given binary hash."""
    db = Database()
    task_rows = db.list_tasks_by_binary(binary_sha1, limit=50, category="file")

    analyses_files = []
    for sample, task in task_rows or []:
        entry = task.to_dict()
        entry["sample"] = sample.to_dict()
        if db.view_errors(task.id):
            entry["errors"] = True

        # Derive station and file name from the stored target path,
        # which looks like /<root>/<station>/<file> (or /<root>/<file>).
        parts = entry["target"].split('/')
        if len(parts) > 3:
            entry["file"] = parts[3]
            entry["station"] = parts[2]
        else:
            entry["file"] = parts[2]
            entry["station"] = ""

        analyses_files.append(entry)

    return render_to_response("analysis/show_reports.html",
                              {"files": analyses_files, "urls": None},
                              context_instance=RequestContext(request))
示例13: remove_pending
def remove_pending(request):
    """Delete every pending task, then return to the pending view."""
    db = Database()
    for pending_task in db.list_tasks(status=TASK_PENDING):
        db.delete_task(pending_task.id)
    return redirect("analysis.views.pending")
示例14: setUp
class TestDatabase:
    """Unit tests for Database machine and task/sample lifecycle."""

    def setUp(self):
        # An in-memory SQLite DSN keeps each test isolated and fast.
        self.database = Database(dsn="sqlite://")

    def test_machine_add_clean(self):
        # Adding one machine makes it queryable...
        self.database.add_machine("a", "a", "1.1.1.1", "win", "", "", "", "", "")
        db_session = self.database.Session()
        assert_equal(db_session.query(Machine).count(), 1)
        # ...and clean_machines() removes it again.
        self.database.clean_machines()
        assert_equal(db_session.query(Machine).count(), 0)

    def test_task_add_del(self):
        # Adding a path creates both a Sample and a Task row.
        sample_path = tempfile.mkstemp()[1]
        self.database.add_path(sample_path)
        db_session = self.database.Session()
        assert_equal(db_session.query(Sample).count(), 1)
        assert_equal(db_session.query(Task).count(), 1)
        # Dropping tasks must leave samples untouched.
        self.database.drop_tasks()
        assert_equal(db_session.query(Task).count(), 0)
        assert_equal(db_session.query(Sample).count(), 1)
        # Dropping samples clears the remainder.
        self.database.drop_samples()
        assert_equal(db_session.query(Sample).count(), 0)
示例15: cuckoo_clean
def cuckoo_clean():
    """Clean up cuckoo setup.

    It deletes logs, all stored data from file system and configured
    databases (SQL and MongoDB).
    """
    # Init logging.
    # This need to init a console logger handler, because the standard
    # logger (init_logging()) logs to a file which will be deleted.
    create_structure()
    init_console_logging()

    # Initialize the database connection.
    db = Database()

    # Drop all tables.
    db.drop()

    # Check if MongoDB reporting is enabled and drop that if it is.
    cfg = Config("reporting")
    if cfg.mongodb and cfg.mongodb.enabled:
        from pymongo import MongoClient
        host = cfg.mongodb.get("host", "127.0.0.1")
        port = cfg.mongodb.get("port", 27017)
        mdb = cfg.mongodb.get("db", "cuckoo")
        try:
            conn = MongoClient(host, port)
            conn.drop_database(mdb)
            # close() replaces the deprecated disconnect() alias.
            conn.close()
        except Exception:
            # Narrowed from a bare except; a MongoDB failure is logged and
            # the filesystem cleanup below still runs.
            log.warning("Unable to drop MongoDB database: %s", mdb)

    # Paths to clean.
    paths = [
        os.path.join(CUCKOO_ROOT, "db"),
        os.path.join(CUCKOO_ROOT, "log"),
        os.path.join(CUCKOO_ROOT, "storage"),
    ]

    # Delete various directories.
    for path in paths:
        if os.path.isdir(path):
            try:
                shutil.rmtree(path)
            except (IOError, OSError) as e:
                log.warning("Error removing directory %s: %s", path, e)

    # Delete all compiled Python objects ("*.pyc").
    for dirpath, dirnames, filenames in os.walk(CUCKOO_ROOT):
        for fname in filenames:
            if not fname.endswith(".pyc"):
                continue

            # os.walk() already yields dirpath rooted at CUCKOO_ROOT, so
            # joining CUCKOO_ROOT again (as before) would double the prefix
            # whenever CUCKOO_ROOT is a relative path.
            path = os.path.join(dirpath, fname)
            try:
                os.unlink(path)
            except (IOError, OSError) as e:
                log.warning("Error removing file %s: %s", path, e)