本文整理汇总了Python中splunklib.searchcommands.dispatch函数的典型用法代码示例。如果您正苦于以下问题:Python dispatch函数的具体用法?Python dispatch怎么用?Python dispatch使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了dispatch函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_generating_command_as_unit
def test_generating_command_as_unit(self):
    """Drive SimulateCommand through both dispatch phases and verify output.

    Phase 1 (__GETINFO__) is compared against a recorded metadata CSV;
    phase 2 (__EXECUTE__) must emit the expected header fields and events.
    """
    simulate_path = get_searchcommand_example("simulate.py")
    self.assertTrue(os.path.isfile(simulate_path))
    # The command reads population.csv out of $SPLUNK_HOME/var/run/splunk/,
    # so stage a copy there before dispatching.
    shutil.copy(
        os.path.join(os.path.dirname(simulate_path), "population.csv"),
        validators.File._var_run_splunk)
    # Load the SimulateCommand class from simulate.py.
    simulate = imp.load_source('searchcommands_app', simulate_path)
    # Both phases share the same command options.
    options = ["duration=00:00:10", "csv=population.csv", "rate=1",
               "interval=00:00:01"]
    # Phase 1: __GETINFO__ must reproduce the recorded metadata CSV.
    getinfo_out = StringIO()
    dispatch(simulate.SimulateCommand,
             ["simulate.py", "__GETINFO__"] + options,
             StringIO(), getinfo_out, "__main__")
    expected_info_path = os.path.join(
        os.path.dirname(__file__),
        'data/_expected_results/test_generating_command_in_isolation.getinfo.csv')
    self.assertEqual(
        io.open(os.path.abspath(expected_info_path), newline='').read(),
        getinfo_out.getvalue())
    # Phase 2: __EXECUTE__ must generate events with the expected header.
    execute_out = StringIO()
    dispatch(simulate.SimulateCommand,
             ["simulate.py", "__EXECUTE__"] + options,
             StringIO(), execute_out, "__main__")
    # Drop the leading header noise line and the trailing blank line.
    rows = execute_out.getvalue().split("\r\n")[1:-1]
    expected_fields = [
        '_time',
        '_serial',
        'text',
        '__mv__time',
        '__mv__serial',
        '__mv_text',
    ]
    found_fields = rows[0].split(",")
    self.assertEqual(len(expected_fields), len(found_fields))
    self.assertEqual(expected_fields, found_fields)
    # Field-name row plus at least a few events must be present.
    self.assertTrue(3 < len(rows))
    return
示例2: test_helloworld_generating_command_as_unit
def test_helloworld_generating_command_as_unit(self):
    """Run GenerateHelloCommand through __GETINFO__ and __EXECUTE__ and
    validate the metadata CSV, the emitted field names, and every event.
    """
    helloworld_path = get_searchcommand_example("generatehello.py")
    self.assertTrue(os.path.isfile(helloworld_path))
    helloworld = imp.load_source('searchcommands_app', helloworld_path)
    instream = StringIO()
    outstream = StringIO()
    cli_args = [
        "generatehello.py",
        "__GETINFO__",
        "count=5",
    ]
    # Run the process
    dispatch(helloworld.GenerateHelloCommand, cli_args, instream, outstream,
             "__main__")
    expected_info_path = os.path.join(
        os.path.dirname(__file__),
        'data/_expected_results/test_generating_command_in_isolation.getinfo.csv')
    self.assertEqual(
        io.open(os.path.abspath(expected_info_path), newline='').read(),
        outstream.getvalue())
    # Overwrite the existing StringIO objects
    instream = StringIO()
    outstream = StringIO()
    cli_args = [
        "generatehello.py",
        "__EXECUTE__",
        "count=5",
    ]
    # Run the process
    dispatch(helloworld.GenerateHelloCommand, cli_args, instream, outstream,
             "__main__")
    # Trim the blank lines at either end of the list
    rows = outstream.getvalue().split("\r\n")[1:-1]
    found_fields = rows[0].split(",")
    expected_fields = [
        '_time',
        'event_no',
        '_raw',
        '__mv__time',
        '__mv_event_no',
        '__mv__raw',
    ]
    self.assertEqual(len(expected_fields), len(found_fields))
    self.assertEqual(expected_fields, found_fields)
    # Trim the field names
    events = rows[1:]
    self.assertEqual(5, len(events))
    # BUG FIX: the loop previously started at index 1, so the first
    # generated event was never validated; enumerate from 0 instead.
    for i, line in enumerate(events):
        event = line.split(",")
        # event_no is 1-based, hence i + 1.
        self.assertEqual(i + 1, int(event[1]))
        # _raw's final character carries the event number.
        self.assertEqual(i + 1, int(event[2][-1]))
    return
示例3: len
event_id = hashlib.md5(incident[0]['incident_id'] + now).hexdigest()
event = 'time="%s" severity=INFO origin="ModifyIncidentsCommand" event_id="%s" user="%s" action="change" incident_id="%s" %s="%s" previous_%s="%s"' % (now, event_id, user, incident[0]['incident_id'], key, attrs[key], key, incident[0][key])
input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])
incident[0][key] = attrs[key]
if len(changed_keys) > 0:
uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents/' + incident[0]['_key']
del incident[0]['_key']
contentsStr = json.dumps(incident[0])
serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey, jsonargs=contentsStr)
if self.comment:
self.comment = self.comment.replace('\n', '<br />').replace('\r', '')
event_id = hashlib.md5(incident[0]['incident_id'] + now).hexdigest()
event = 'time="%s" severity=INFO origin="ModifyIncidentsCommand" event_id="%s" user="%s" action="comment" incident_id="%s" comment="%s"' % (now, event_id, user, incident[0]['incident_id'], self.comment)
event = event.encode('utf8')
input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])
else:
self.logger.warn("No attributes to modify found, aborting.")
else:
self.logger.warn("No incident_id field found in event, aborting.")
yield record
dispatch(ModifyIncidentsCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例4: __dir__
textmodel.save(args['model'].replace(".pkl",".%s" % args['textmodel']))
elif args['textmodel'] == 'hashing':
model['text'] = args['textmodel']
joblib.dump(model, args['model'])
print >> sys.stderr, "END"
channel.send({
'model': args['model'],
'score': score.item() if score else None,
'training_size': X_train.shape[0],
'test_size': X_test.shape[0]
})
"""
def __dir__(self):
    """Expose the command's configurable option names for introspection."""
    return "reset model textmodel test_size target _C".split()
@Configuration(clear_required_fields=True)
def map(self, records):
    """Pass records through unchanged; on failure emit one empty record.

    :param records: iterable of event dicts from the pipeline
    :return: generator yielding the input records
    """
    try:
        for record in records:
            yield record
    except Exception:
        # BUG FIX: was a bare `except:`, which inside a generator also
        # swallows GeneratorExit (raising RuntimeError on close) and hides
        # KeyboardInterrupt/SystemExit. The best-effort empty record on
        # failure is preserved.
        yield {}
dispatch(MCTrain, sys.argv, sys.stdin, sys.stdout, __name__)
示例5: Option
doc=''' Temp Field ''', default='SLP')
field = Option(
doc=''' field to put value in ''',
default='psi')
def stream(self, events):
    """Convert the pressure in field self.SLP to psi in field self.field.

    :param events: iterable of event dicts
    :return: generator yielding each event with self.field populated,
        or "N/A" when the input is missing or non-numeric
    """
    for event in events:
        self.logger.debug("start")
        try:
            pressure = float(event[self.SLP])
            # 0.491154 psi per inHg -- assumes SLP is inches of mercury;
            # TODO confirm units against the data source.
            event[self.field] = round(0.491154 * pressure, 2)
        except (KeyError, TypeError, ValueError):
            # BUG FIX: was a bare `except:`; narrowed to the errors a
            # missing key or non-numeric value can raise so interrupts
            # are no longer swallowed.
            event[self.field] = "N/A"
        yield event
dispatch(Psi, sys.argv, sys.stdin, sys.stdout, __name__)
示例6: Option
doc=''' Temp Field ''', default='SLP')
field = Option(
doc=''' field to put value in ''',
default='kilopascals')
def stream(self, events):
    """Convert the pressure in field self.SLP to kilopascals in self.field.

    :param events: iterable of event dicts
    :return: generator yielding each event with self.field populated,
        or "N/A" when the input is missing or non-numeric
    """
    for event in events:
        self.logger.debug("start")
        try:
            pressure = float(event[self.SLP])
            # 33.8639 hPa per inHg, divided by 10 to get kPa -- assumes SLP
            # is inches of mercury; TODO confirm units against data source.
            event[self.field] = round(33.8639 * (pressure / 10), 2)
        except (KeyError, TypeError, ValueError):
            # BUG FIX: was a bare `except:`; narrowed to the errors a
            # missing key or non-numeric value can raise.
            event[self.field] = "N/A"
        yield event
dispatch(Kilopascals, sys.argv, sys.stdin, sys.stdout, __name__)
示例7: processDate
def processDate(self, event, field):
    """Humanize a timestamp field as a date using the function named by
    self.command (e.g. naturalday/naturaldate).

    Returns the humanized string, or None when the field is not a parseable
    number (the caller stores the result back into the event, so a failure
    blanks the field -- behavior preserved from the original).
    """
    try:
        timestamp = float(event[field])
        # BUG FIX: replaced eval("humanize." + self.command + "(...)") with
        # a direct getattr call -- same result, no string building and no
        # eval() attack surface.
        return getattr(humanize, self.command)(
            datetime.date.fromtimestamp(timestamp))
    except ValueError:
        pass
def processTime(self, event, field):
    """Humanize a timestamp field as a datetime using the function named
    by self.command (e.g. naturaltime).

    Returns the humanized string, or None when the field is not a parseable
    number (the caller stores the result back into the event, so a failure
    blanks the field -- behavior preserved from the original).
    """
    try:
        timestamp = float(event[field])
        # BUG FIX: replaced eval("humanize." + self.command + "(...)") with
        # a direct getattr call -- same result, no eval() attack surface.
        return getattr(humanize, self.command)(
            datetime.datetime.fromtimestamp(timestamp))
    except ValueError:
        pass
def stream(self, events):
    """Apply the humanize function named by self.command to each requested
    field of each event.

    :param events: iterable of event dicts
    :return: generator yielding each event with the selected fields
        humanized in place; absent or empty fields are left untouched
    """
    import ast  # local import: used only by the generic branch below
    self.logger.debug('HumanizeCommand: {}\n {}'.format(self, self.command))  # logs command line
    for event in events:
        for field in self.fieldnames:
            # Skip fields that are absent or empty (original behavior).
            if field not in event or len(event[field]) == 0:
                continue
            if self.command in ['naturalday', 'naturaldate']:
                event[field] = self.processDate(event, field)
            elif self.command == 'naturaltime':
                event[field] = self.processTime(event, field)
            else:
                # SECURITY FIX: the original eval()'d raw event data,
                # allowing arbitrary code execution from search results.
                # ast.literal_eval parses the field as a plain Python
                # literal (number, string, ...) and getattr dispatches to
                # the humanize function directly.
                value = ast.literal_eval(event[field])
                event[field] = getattr(humanize, self.command)(value)
        yield event
dispatch(HumanizeCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例8: stream
default='new_wind_chill')
def stream(self, events):
    """Compute old- and new-formula wind chill from temperature, relative
    humidity and wind speed fields.

    Reads self.Temp, self.Relh, self.Winds; writes the old-formula result
    to self.field and the new-formula result to self.field2 ("N/A" on any
    missing or non-numeric input).

    :param events: iterable of event dicts
    :return: generator yielding the mutated events
    """
    for event in events:
        self.logger.debug("start")
        try:
            temp = float(event[self.Temp])
            RH = float(event[self.Relh])  # read for validation; not used in either formula
            wind = float(event[self.Winds])
            # Pre-2001 (Siple/Passel era) wind chill formula.
            old_chil = round((0.0817 * (3.71 * (pow(wind, 0.5)) + 5.81 - 0.25 * wind) * (temp - 91.4) + 91.4), 2)
            # 2001 NWS wind chill formula -- assumes temp in deg F and
            # wind in mph; TODO confirm units against the data source.
            new_chil = round(((35.74 + 0.6215 * temp - 35.75 * pow(wind, 0.16) + 0.4275 * temp * pow(wind, 0.16))), 2)
            event[self.field] = round(old_chil, 2)
            event[self.field2] = round(new_chil, 2)
        except (KeyError, TypeError, ValueError):
            # BUG FIX: was a bare `except:`; narrowed to the errors a
            # missing key or non-numeric value can raise.
            event[self.field] = "N/A"
            event[self.field2] = "N/A"
        yield event
dispatch(WindChill, sys.argv, sys.stdin, sys.stdout, __name__)
示例9: vincenty
current[relative_distance] = vincenty(last_pos, current_pos, miles=bool(self.miles))
position_tracker[current[self.group_by]] = current_pos
yield current
else:
last_pos = None
for event in events:
current = event
if not (current[latitude] or current[longitude]):
current[relative_distance] = 0.0
self.logger.debug(
"[%s] - Using distance=0 for private IPs or unknown coordinates. Exclude if undesired." % str(
self.metadata.searchinfo.sid))
else:
current_pos = (float(current[latitude]), float(current[longitude]))
if last_pos is None:
current[relative_distance] = 0.0
self.logger.debug("[%s] - Initializing the first location with distance=0" % str(
self.metadata.searchinfo.sid))
else:
if use_haversine:
current[relative_distance] = haversine(last_pos, current_pos, miles=bool(self.miles))
else:
current[relative_distance] = vincenty(last_pos, current_pos, miles=bool(self.miles))
last_pos = current_pos
self.logger.debug(current)
yield current
self.logger.info("[%s] - Completed successfully." % str(self.metadata.searchinfo.sid))
dispatch(GeoDistanceCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例10: enumerate
X = X.toarray()
y_pred = None
mah = None
clf = classifiers.get(classifier)
if clf:
try:
clf.fit(X)
y = clf.decision_function(X).ravel()
threshold = stats.scoreatpercentile(y, 100 * fraction)
y_pred = y > threshold
if classifier == 'covariance_estimator' and args['showmah']:
mah = clf.mahalanobis(X)
except ValueError:
y_pred = np.zeros((X.shape[0]))
for i, y in enumerate(y_pred):
if y:
record = records[i]
if mah is not None:
record['mahalanobis'] = mah[i].item()
channel.send(record)
else:
channel.send({ "error": "Incorrect classifier specified %s" % classifier })
"""
def __dir__(self):
    """Expose the command's configurable option names for introspection."""
    return ('threshold kernel degree gamma coef0 '
            'support_fraction showmah classifier').split()
dispatch(Outliers, sys.argv, sys.stdin, sys.stdout, __name__)
示例11: SearchTableCommand
import logging, os, splunk
from splunklib.searchcommands import \
dispatch, StreamingCommand, Configuration, Option, validators
@Configuration()
class SearchTableCommand(StreamingCommand):
    """Streaming command that keeps only records where any field value
    matches the given regular expression.
    """

    pattern = Option(
        doc='''
        **Syntax:** **pattern=***<regular-expression>*
        **Description:** Regular expression pattern to match''',
        require=False, validate=validators.RegularExpression())

    def stream(self, records):
        """Yield each record in which at least one field matches pattern.

        :param records: iterable of record dicts
        :return: generator of the matching records
        """
        #pydevd.settrace()
        self.logger.setLevel(logging.DEBUG)
        self.logger.debug('SearchTableCommand: %s' % self)  # logs command line
        for record in records:
            # IMPROVED: the original tracked a string flag ("true"/"false")
            # and counted every match via len(list(finditer())); any() with
            # search() short-circuits on the first matching field.
            if any(self.pattern.search(str(value)) is not None
                   for value in record.values()):
                yield record
        self.logger.debug('SearchTableCommand: Done')
# Module entry point: splunklib's dispatch() parses sys.argv and runs the
# command protocol over stdin/stdout when Splunk invokes this script (the
# __name__ guard is handled inside dispatch).
# NOTE(review): `sys` does not appear in the imports visible above --
# confirm it is imported elsewhere in this file.
dispatch(SearchTableCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例12: str
#print 'Row:', curr_row
curr_cell = -1
_raw = ""
line = {}
while curr_cell < num_cells:
if curr_row > 0:
curr_cell += 1
# Cell Types: 0=Empty, 1=Text, 2=Number, 3=Date, 4=Boolean, 5=Error, 6=Blank
cell_type = worksheet.cell_type(curr_row, curr_cell)
cell_value = worksheet.cell_value(curr_row, curr_cell)
head_value = worksheet.cell_value(0, curr_cell)
#print ' ', cell_type, ':', cell_value
_raw = _raw + '"' + str(head_value) + '"="' + str(cell_value) +'" '
line[str(head_value)] = str(cell_value)
else:
curr_cell += 1
if curr_row > 1:
line["_time"] = time.time()
line["line_number"] = i
line["_raw"] = _raw
line["worksheet_name"] = worksheet_name
yield line
i=i+1
dispatch(WGetxlsCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例13: dict
asset['_raw'] = util.tojson(asset)
yield asset
else:
try:
# If not 200 status_code showing error message in Splunk UI
record = util.dictexpand(response)
record['url'] = url
record['_raw'] = util.tojson(response)
except Exception as e:
record = dict()
record['url'] = url
record['error'] = e
record['_raw'] = util.tojson(response)
yield record
else:
try:
# If not 200 status_code showing error message in Splunk UI
record = util.dictexpand(response)
record['url'] = url
record['_raw'] = util.tojson(response)
except Exception as e:
record = dict()
record['url'] = url
record['error'] = e
record['_raw'] = util.tojson(response)
yield record
dispatch(getUserCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例14: Option
outputfield = Option(
doc='''
**Syntax:** **outputfield=***<fieldname>*
**Description:** Name of the field that will hold the found time''',
require=True, validate=validators.Fieldname())
def stream(self, records):
    """Find a strptime-parseable substring in each record and store its
    epoch time.

    Reads the text from record[self.fieldname] and the strptime format
    from record[self.pattern]; writes time.mktime() of the parsed value to
    record[self.outputfield]. The search trims characters from the left
    (outer loop) and from the right (inner loop) until datetime.strptime()
    accepts a substring; the LAST successful parse wins and substrings no
    longer than the format string are never attempted (both preserved from
    the original behavior).

    :param records: iterable of record dicts
    :return: generator yielding each record, annotated when a parse succeeds
    """
    self.logger.debug('StrptimeFindCommand: %s', self)  # logs command line
    for record in records:
        # IMPROVED: dropped the unused locals `pattern` and `count` that
        # the original assigned and never read.
        haystack = record[self.fieldname]
        valid_strptime_string = record[self.pattern]
        datetime_object = 0  # falsy sentinel: "no parse succeeded yet"
        limit = len(valid_strptime_string)
        while len(haystack) > limit:
            candidate = haystack
            while len(candidate) > limit:
                try:
                    datetime_object = datetime.strptime(candidate, valid_strptime_string)
                    break
                except (TypeError, ValueError):
                    # BUG FIX: was a bare `except:`; strptime failures
                    # raise ValueError (TypeError for non-string input).
                    candidate = candidate[:-1]
            haystack = haystack[1:]
        if datetime_object:
            record[self.outputfield] = time.mktime(datetime_object.timetuple())
        yield record
dispatch(StrptimeFindCommand, sys.argv, sys.stdin, sys.stdout, __name__)
示例15: PasGetUserInfoCommand
#!/usr/bin/env python
import requests
import json
import sys, time
from splunklib.searchcommands import \
dispatch, GeneratingCommand, Configuration, Option, validators
@Configuration()
class PasGetUserInfoCommand(GeneratingCommand):
    """Generating command that fetches one user's details from the local
    PAS user-list REST service and emits them as a single row.
    """

    # Required: user name/id appended to the service URL.
    user = Option(require=True)

    def generate(self):
        """Yield one row of stringified user attributes, or nothing when
        the service does not know the user.
        """
        url = 'http://localhost:5000/user_list/api/v1.0/users/' + self.user
        # NOTE(review): no timeout on requests.get -- a hung service stalls
        # the search; consider requests.get(url, timeout=...).
        data = requests.get(url).json()
        if 'user' in data:
            # Known user. BUG FIX: dict.iteritems() is Python-2-only;
            # dict.items() behaves identically here on both 2 and 3.
            row = {}
            for k, v in data['user'].items():
                row[str(k)] = str(v)
            yield row
        else:
            # Unknown user. Return no data.
            pass
# Module entry point: splunklib's dispatch() parses sys.argv and runs the
# command protocol over stdin/stdout when Splunk invokes this script (the
# __name__ guard is handled inside dispatch).
dispatch(PasGetUserInfoCommand, sys.argv, sys.stdin, sys.stdout, __name__)