This article collects typical usage examples of the Python function mozdef_util.utilities.toUTC.toUTC. If you are wondering what toUTC does, how to call it, or what real code that uses it looks like, the curated examples below should help.
Twenty toUTC code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
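Before the project examples, here is a minimal usage sketch (not taken from any of the projects below). It assumes mozdef_util is installed; the accepted input types are an assumption inferred from how the examples call the function.

from datetime import datetime
from mozdef_util.utilities.toUTC import toUTC

# Naive datetimes are normalized to a timezone-aware UTC datetime.
now_utc = toUTC(datetime.now())
print(now_utc.isoformat())   # e.g. '2019-01-01T12:00:00+00:00'

# Round-tripping an ISO 8601 UTC string is a no-op, which is exactly
# what several of the test examples below assert:
stamp = now_utc.isoformat()
assert toUTC(stamp).isoformat() == stamp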
Example 1: verify_defaults
def verify_defaults(self, result):
    assert result['category'] == 'suricata'
    assert result['eventsource'] == 'nsm'
    assert toUTC(result['receivedtimestamp']).isoformat() == result['receivedtimestamp']
    assert result['severity'] == 'INFO'
    assert 'event_type' in result
    assert 'source' in result
    assert toUTC(result['timestamp']).isoformat() == result['timestamp']
    assert toUTC(result['utctimestamp']).isoformat() == result['utctimestamp']
Developer: mozilla, Project: MozDef, Lines of code: 9, Source file: test_suricataFixup.py
Example 2: esCloseIndices
def esCloseIndices():
    logger.debug('started')
    try:
        es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        indices = es.get_indices()
    except Exception as e:
        logger.error("Unhandled exception while connecting to ES, terminating: %r" % (e))
        return  # bail out: without an ES connection there is nothing to close

    # examine each index pulled from get_indices
    # to determine if it meets aging criteria
    month_ago_date = toUTC(datetime.now()) - timedelta(days=int(options.index_age))
    month_ago_date = month_ago_date.replace(tzinfo=None)
    for index in indices:
        if 'events' in index:
            index_date = index.rsplit('-', 1)[1]
            logger.debug("Checking to see if Index: %s can be closed." % (index))
            if len(index_date) == 8:
                index_date_obj = datetime.strptime(index_date, '%Y%m%d')
                try:
                    if month_ago_date > index_date_obj:
                        logger.debug("Index: %s will be closed." % (index))
                        es.close_index(index)
                    else:
                        logger.debug("Index: %s does not meet aging criteria and will not be closed." % (index))
                except Exception as e:
                    logger.error("Unhandled exception while closing indices, terminating: %r" % (e))
Developer: IFGHou, Project: MozDef, Lines of code: 26, Source file: closeIndices.py
Example 3: esPruneIndexes
def esPruneIndexes():
    if options.output == 'syslog':
        logger.addHandler(SysLogHandler(address=(options.sysloghostname, options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    logger.debug('started')
    try:
        es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
        indices = es.get_indices()
        # do the pruning
        for (index, dobackup, rotation, pruning) in zip(options.indices, options.dobackup, options.rotation, options.pruning):
            try:
                if pruning != '0':
                    index_to_prune = index
                    if rotation == 'daily':
                        idate = date.strftime(toUTC(datetime.now()) - timedelta(days=int(pruning)), '%Y%m%d')
                        index_to_prune += '-%s' % idate
                    elif rotation == 'monthly':
                        idate = date.strftime(datetime.utcnow() - timedelta(days=31 * int(pruning)), '%Y%m')
                        index_to_prune += '-%s' % idate

                    if index_to_prune in indices:
                        logger.debug('Deleting index: %s' % index_to_prune)
                        es.delete_index(index_to_prune, True)
                    else:
                        logger.error('Error deleting index %s, index missing' % index_to_prune)
            except Exception as e:
                logger.error("Unhandled exception while deleting %s, terminating: %r" % (index_to_prune, e))
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
Developer: IFGHou, Project: MozDef, Lines of code: 34, Source file: pruneIndexes.py
Example 4: createAlertDict
def createAlertDict(
    self,
    summary,
    category,
    tags,
    events,
    severity="NOTICE",
    url=None,
    ircchannel=None,
):
    """
    Create an alert dict
    """
    alert = {
        "utctimestamp": toUTC(datetime.now()).isoformat(),
        "severity": severity,
        "summary": summary,
        "category": category,
        "tags": tags,
        "events": [],
        "ircchannel": ircchannel,
    }
    if url:
        alert["url"] = url
    for e in events:
        alert["events"].append(
            {
                "documentindex": e["_index"],
                "documentsource": e["_source"],
                "documentid": e["_id"],
            }
        )
    self.log.debug(alert)
    return alert
Developer: mozilla, Project: MozDef, Lines of code: 35, Source file: alerttask.py
Example 5: on_message
def on_message(self, body, message):
    try:
        # just to be safe..check what we were sent.
        if isinstance(body, dict):
            body_dict = body
        elif isinstance(body, str) or isinstance(body, unicode):
            try:
                body_dict = json.loads(body)  # lets assume it's json
            except ValueError as e:
                # not json..ack but log the message
                logger.exception("mozdefbot_slack exception: unknown body type received %r" % body)
                return
        else:
            logger.exception("mozdefbot_slack exception: unknown body type received %r" % body)
            return

        if 'notify_mozdefbot' in body_dict and body_dict['notify_mozdefbot'] is False:
            # If the alert tells us to not notify, then don't post message
            message.ack()
            return

        # process valid message
        # see where we send this alert
        channel = options.default_alert_channel
        if 'ircchannel' in body_dict:
            if body_dict['ircchannel'] in options.channels:
                channel = body_dict['ircchannel']

        # see if we need to delay a bit before sending the alert, to avoid
        # flooding the channel
        if self.lastalert is not None:
            delta = toUTC(datetime.now()) - self.lastalert
            logger.info('new alert, delta since last is {}\n'.format(delta))
            if delta.seconds < 2:
                logger.info('throttling before writing next alert\n')
                time.sleep(1)
        self.lastalert = toUTC(datetime.now())
        if len(body_dict['summary']) > 450:
            logger.info('alert is more than 450 bytes, truncating\n')
            body_dict['summary'] = body_dict['summary'][:450] + ' truncated...'

        logger.info("Posting alert: {0}".format(body_dict['summary']))
        self.bot.post_alert_message(body_dict, channel)
        message.ack()
    except ValueError as e:
        logger.exception("mozdefbot_slack exception while processing events queue %r" % e)
Developer: IFGHou, Project: MozDef, Lines of code: 46, Source file: mozdefbot.py
Example 6: main
def main(self):
    search_query = SearchQuery(hours=6)
    day_old_date = toUTC(datetime.now() - timedelta(days=1)).isoformat()
    search_query.add_must(LessThanMatch('utctimestamp', day_old_date))
    self.filtersManual(search_query)
    self.searchEventsAggregated('mozdefhostname', samplesLimit=1000)
    self.walkAggregations(threshold=1)
Developer: IFGHou, Project: MozDef, Lines of code: 9, Source file: old_events.py
Example 7: genNewAttacker
def genNewAttacker():
    newAttacker = dict()
    newAttacker['_id'] = genMeteorID()
    newAttacker['lastseentimestamp'] = toUTC(datetime.now())
    newAttacker['firstseentimestamp'] = toUTC(datetime.now())
    newAttacker['eventscount'] = 0
    newAttacker['alerts'] = list()
    newAttacker['alertscount'] = 0
    newAttacker['category'] = 'unknown'
    newAttacker['score'] = 0
    newAttacker['geocoordinates'] = dict(countrycode='', longitude=0, latitude=0)
    newAttacker['tags'] = list()
    newAttacker['notes'] = list()
    newAttacker['indicators'] = list()
    newAttacker['attackphase'] = 'unknown'
    newAttacker['datecreated'] = toUTC(datetime.now())
    newAttacker['creator'] = sys.argv[0]
    return newAttacker
Developer: mozilla, Project: MozDef, Lines of code: 19, Source file: collectAttackers.py
Example 8: test_add_required_fields_default
def test_add_required_fields_default(self):
    mock_class = MockHostname()
    socket.gethostname = mock_class.hostname
    self.event.add_required_fields()
    assert self.event['receivedtimestamp'] is not None
    assert toUTC(self.event['receivedtimestamp']).isoformat() == self.event['receivedtimestamp']
    assert self.event['utctimestamp'] is not None
    assert toUTC(self.event['utctimestamp']).isoformat() == self.event['utctimestamp']
    assert self.event['timestamp'] is not None
    assert toUTC(self.event['timestamp']).isoformat() == self.event['timestamp']
    assert self.event['mozdefhostname'] == 'randomhostname'
    assert self.event['tags'] == []
    assert self.event['category'] == 'UNKNOWN'
    assert self.event['hostname'] == 'UNKNOWN'
    assert self.event['processid'] == 'UNKNOWN'
    assert self.event['processname'] == 'UNKNOWN'
    assert self.event['severity'] == 'UNKNOWN'
    assert self.event['source'] == 'UNKNOWN'
    assert self.event['summary'] == 'example summary'
    assert self.event['tags'] == []
    assert self.event['details'] == {}
Developer: IFGHou, Project: MozDef, Lines of code: 21, Source file: test_event.py
Example 9: execute
def execute(self, elasticsearch_client, indices=['events', 'events-previous'], size=1000, request_timeout=30):
    if self.must == [] and self.must_not == [] and self.should == [] and self.aggregation == []:
        raise AttributeError('Must define a must, must_not, should query, or aggregation')

    if self.date_timedelta:
        end_date = toUTC(datetime.now())
        begin_date = toUTC(datetime.now() - timedelta(**self.date_timedelta))
        utc_range_query = RangeMatch('utctimestamp', begin_date, end_date)
        received_range_query = RangeMatch('receivedtimestamp', begin_date, end_date)
        range_query = utc_range_query | received_range_query
        self.add_must(range_query)

    search_query = None
    search_query = BooleanMatch(must=self.must, must_not=self.must_not, should=self.should)

    results = []
    if len(self.aggregation) == 0:
        results = elasticsearch_client.search(search_query, indices, size, request_timeout)
    else:
        results = elasticsearch_client.aggregated_search(search_query, indices, self.aggregation, size, request_timeout)
    return results
Developer: IFGHou, Project: MozDef, Lines of code: 22, Source file: search_query.py
Example 10: run
def run(self):
    while True:
        try:
            curRequestTime = toUTC(datetime.now()) - timedelta(seconds=options.ptbackoff)
            records = self.ptrequestor.request(options.ptquery, self.lastRequestTime, curRequestTime)
            # update last request time for the next request
            self.lastRequestTime = curRequestTime
            for msgid in records:
                msgdict = records[msgid]

                # strip any line feeds from the message itself, we just convert them
                # into spaces
                msgdict['message'] = msgdict['message'].replace('\n', ' ').replace('\r', '')

                event = dict()
                event['tags'] = ['papertrail', options.ptacctname]
                event['details'] = msgdict

                if 'generated_at' in event['details']:
                    event['utctimestamp'] = toUTC(event['details']['generated_at']).isoformat()
                if 'hostname' in event['details']:
                    event['hostname'] = event['details']['hostname']
                if 'message' in event['details']:
                    event['summary'] = event['details']['message']
                if 'severity' in event['details']:
                    event['severity'] = event['details']['severity']
                if 'source_ip' in event['details']:
                    event['sourceipaddress'] = event['details']['source_ip']
                else:
                    event['severity'] = 'INFO'
                event['category'] = 'syslog'

                # process message
                self.on_message(event, msgdict)

            time.sleep(options.ptinterval)
        except ValueError as e:
            logger.exception('Exception while handling message: %r' % e)
Developer: Phrozyn, Project: MozDef, Lines of code: 39, Source file: esworker_papertrail.py
Example 11: convert_key_date_format
def convert_key_date_format(self, needle, haystack):
    num_levels = needle.split(".")
    if len(num_levels) == 0:
        return False
    current_pointer = haystack
    for updated_key in num_levels:
        if updated_key == num_levels[-1]:
            current_pointer[updated_key] = toUTC(
                current_pointer[updated_key]).isoformat()
            return haystack
        if updated_key in current_pointer:
            current_pointer = current_pointer[updated_key]
        else:
            return haystack
Developer: IFGHou, Project: MozDef, Lines of code: 14, Source file: guardDuty.py
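To make the expected input/output of Example 11 concrete, here is a hypothetical, self-contained sketch of the same dotted-path traversal, rewritten without the class wrapper. The field names are illustrative only and are not taken from MozDef.

from mozdef_util.utilities.toUTC import toUTC

def convert_key_date_format(needle, haystack):
    # Standalone rewrite of Example 11: walk the dotted key path into the
    # nested dict and rewrite the leaf value as a UTC ISO 8601 string.
    keys = needle.split(".")
    current_pointer = haystack
    for key in keys:
        if key == keys[-1]:
            current_pointer[key] = toUTC(current_pointer[key]).isoformat()
            return haystack
        if key in current_pointer:
            current_pointer = current_pointer[key]
        else:
            return haystack

event = {'details': {'finding': {'eventlastseen': '2018-09-12 22:24:09'}}}
convert_key_date_format('details.finding.eventlastseen', event)
# event['details']['finding']['eventlastseen'] is now a UTC ISO 8601
# string, e.g. '2018-09-12T22:24:09+00:00'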
Example 12: updateMongo
def updateMongo(mozdefdb, esAlerts):
    alerts = mozdefdb['alerts']
    for a in esAlerts['hits']:
        # insert alert into mongo if we don't already have it
        alertrecord = alerts.find_one({'esmetadata.id': a['_id']})
        if alertrecord is None:
            # new record
            mrecord = a['_source']
            # generate a meteor-compatible ID
            mrecord['_id'] = genMeteorID()
            # capture the elastic search meta data (index/id/type)
            # set the date back to a datetime from unicode, so mongo/meteor can properly sort, select.
            mrecord['utctimestamp'] = toUTC(mrecord['utctimestamp'])
            # also set an epoch time field so minimongo can sort
            mrecord['utcepoch'] = calendar.timegm(mrecord['utctimestamp'].utctimetuple())
            mrecord['esmetadata'] = dict()
            mrecord['esmetadata']['id'] = a['_id']
            mrecord['esmetadata']['index'] = a['_index']
            alerts.insert(mrecord)
Developer: Phrozyn, Project: MozDef, Lines of code: 19, Source file: syncAlertsToMongo.py
Example 13: esSearch
def esSearch(es):
    search_query = SearchQuery(minutes=options.aggregationminutes)
    search_query.add_aggregation(Aggregation('category'))
    results = search_query.execute(es)
    mozdefstats = dict(utctimestamp=toUTC(datetime.now()).isoformat())
    mozdefstats['category'] = 'stats'
    mozdefstats['hostname'] = socket.gethostname()
    mozdefstats['mozdefhostname'] = mozdefstats['hostname']
    mozdefstats['severity'] = 'INFO'
    mozdefstats['source'] = 'mozdef'
    mozdefstats['tags'] = ['mozdef', 'stats']
    mozdefstats['summary'] = 'Aggregated category counts'
    mozdefstats['processid'] = os.getpid()
    mozdefstats['processname'] = sys.argv[0]
    mozdefstats['details'] = dict(counts=list())
    for bucket in results['aggregations']['category']['terms']:
        entry = dict()
        entry[bucket['key']] = bucket['count']
        mozdefstats['details']['counts'].append(entry)
    return mozdefstats
Developer: IFGHou, Project: MozDef, Lines of code: 21, Source file: eventStats.py
Example 14: createAlertDict
def createAlertDict(self, summary, category, tags, events, severity='NOTICE', url=None, ircchannel=None):
    """
    Create an alert dict
    """
    alert = {
        'utctimestamp': toUTC(datetime.now()).isoformat(),
        'severity': severity,
        'summary': summary,
        'category': category,
        'tags': tags,
        'events': [],
        'ircchannel': ircchannel,
    }
    if url:
        alert['url'] = url
    for e in events:
        alert['events'].append({
            'documentindex': e['_index'],
            'documentsource': e['_source'],
            'documentid': e['_id']})
    self.log.debug(alert)
    return alert
Developer: Phrozyn, Project: MozDef, Lines of code: 23, Source file: alerttask.py
Example 15: onMessage
def onMessage(self, message, metadata):
    if 'tags' not in message:
        return (message, metadata)
    if 'githubeventsqs' not in message['tags']:
        return (message, metadata)

    newmessage = {}
    newmessage['details'] = {}

    newmessage['category'] = 'github'
    newmessage['tags'] = ['github', 'webhook']
    newmessage['eventsource'] = 'githubeventsqs'
    if 'event' in message['details']:
        newmessage['source'] = message['details']['event']
    else:
        newmessage['source'] = 'UNKNOWN'
    if 'request_id' in message['details']:
        newmessage['details']['request_id'] = message['details']['request_id']
    else:
        newmessage['details']['request_id'] = 'UNKNOWN'

    # iterate through top level keys - push, etc
    if newmessage['source'] in self.eventtypes:
        for key in self.yap[newmessage['source']]:
            mappedvalue = jmespath.search(self.yap[newmessage['source']][key], message)
            # JMESPath likes to silently return a None object
            if mappedvalue is not None:
                newmessage['details'][key] = mappedvalue
        if 'commit_ts' in newmessage['details']:
            newmessage['timestamp'] = newmessage['details']['commit_ts']
            newmessage['utctimestamp'] = toUTC(newmessage['details']['commit_ts']).isoformat()
    else:
        newmessage = None

    return (newmessage, metadata)
Developer: IFGHou, Project: MozDef, Lines of code: 36, Source file: github_webhooks.py
Example 16: onMessage
def onMessage(self, request, response):
    '''
    request: http://bottlepy.org/docs/dev/api.html#the-request-object
    response: http://bottlepy.org/docs/dev/api.html#the-response-object
    '''
    # an ES query/facet to count success/failed logins
    # oriented to the data having
    # category: authentication
    # details.success marked true/false for success/failed auth
    # details.username as the user

    begindateUTC = None
    enddateUTC = None
    resultsList = list()
    if begindateUTC is None:
        begindateUTC = datetime.now() - timedelta(hours=12)
        begindateUTC = toUTC(begindateUTC)
    if enddateUTC is None:
        enddateUTC = datetime.now()
        enddateUTC = toUTC(enddateUTC)

    es_client = ElasticsearchClient(list('{0}'.format(s) for s in self.restoptions['esservers']))
    search_query = SearchQuery()
    # a query to tally users with failed logins
    date_range_match = RangeMatch('utctimestamp', begindateUTC, enddateUTC)
    search_query.add_must(date_range_match)
    search_query.add_must(PhraseMatch('category', 'authentication'))
    search_query.add_must(PhraseMatch('details.success', 'false'))
    search_query.add_must(ExistsMatch('details.username'))
    search_query.add_aggregation(Aggregation('details.success'))
    search_query.add_aggregation(Aggregation('details.username'))

    results = search_query.execute(es_client, indices=['events', 'events-previous'])

    # any usernames or words to ignore
    # especially useful if ES is analyzing the username field and breaking apart user@somewhere.com
    # into user somewhere and .com
    stoplist = self.options.ignoreusernames.split(',')

    # walk the aggregate failed users
    # and look for successes/failures
    for t in results['aggregations']['details.username']['terms']:
        if t['key'] in stoplist:
            continue
        failures = 0
        success = 0
        username = t['key']

        details_query = SearchQuery()
        details_query.add_must(date_range_match)
        details_query.add_must(PhraseMatch('category', 'authentication'))
        details_query.add_must(PhraseMatch('details.username', username))
        details_query.add_aggregation(Aggregation('details.success'))

        details_results = details_query.execute(es_client)
        # details.success is boolean. As an aggregate is an int (0/1)
        for details_term in details_results['aggregations']['details.success']['terms']:
            if details_term['key'] == 1:
                success = details_term['count']
            if details_term['key'] == 0:
                failures = details_term['count']
        resultsList.append(
            dict(
                username=username,
                failures=failures,
                success=success,
                begin=begindateUTC.isoformat(),
                end=enddateUTC.isoformat()
            )
        )

    response.body = json.dumps(resultsList)
    response.status = 200
    return (request, response)
Developer: IFGHou, Project: MozDef, Lines of code: 75, Source file: logincounts.py
Example 17: getQueueSizes
def getQueueSizes():
    logger.debug('starting')
    logger.debug(options)
    es = ElasticsearchClient(options.esservers)
    sqslist = {}
    sqslist['queue_stats'] = {}
    qcount = len(options.taskexchange)
    qcounter = qcount - 1

    mqConn = boto.sqs.connect_to_region(
        options.region,
        aws_access_key_id=options.accesskey,
        aws_secret_access_key=options.secretkey
    )

    while qcounter >= 0:
        for exchange in options.taskexchange:
            logger.debug('Looking for sqs queue stats in queue' + exchange)
            eventTaskQueue = mqConn.get_queue(exchange)
            # get queue stats
            taskQueueStats = eventTaskQueue.get_attributes('All')
            sqslist['queue_stats'][qcounter] = taskQueueStats
            sqslist['queue_stats'][qcounter]['name'] = exchange
            qcounter -= 1

    # setup a log entry for health/status.
    sqsid = '{0}-{1}'.format(options.account, options.region)
    healthlog = dict(
        utctimestamp=toUTC(datetime.now()).isoformat(),
        hostname=sqsid,
        processid=os.getpid(),
        processname=sys.argv[0],
        severity='INFO',
        summary='mozdef health/status',
        category='mozdef',
        source='aws-sqs',
        tags=[],
        details=[])
    healthlog['details'] = dict(username='mozdef')
    healthlog['details']['queues'] = list()
    healthlog['details']['total_messages_ready'] = 0
    healthlog['details']['total_feeds'] = qcount
    healthlog['tags'] = ['mozdef', 'status', 'sqs']
    ready = 0
    qcounter = qcount - 1
    for q in sqslist['queue_stats'].keys():
        queuelist = sqslist['queue_stats'][qcounter]
        if 'ApproximateNumberOfMessages' in queuelist:
            ready1 = int(queuelist['ApproximateNumberOfMessages'])
            ready = ready1 + ready
            healthlog['details']['total_messages_ready'] = ready
        if 'ApproximateNumberOfMessages' in queuelist:
            messages = int(queuelist['ApproximateNumberOfMessages'])
        if 'ApproximateNumberOfMessagesNotVisible' in queuelist:
            inflight = int(queuelist['ApproximateNumberOfMessagesNotVisible'])
        if 'ApproximateNumberOfMessagesDelayed' in queuelist:
            delayed = int(queuelist['ApproximateNumberOfMessagesDelayed'])
        if 'name' in queuelist:
            name = queuelist['name']
        queueinfo = dict(
            queue=name,
            messages_delayed=delayed,
            messages_ready=messages,
            messages_inflight=inflight)
        healthlog['details']['queues'].append(queueinfo)
        qcounter -= 1

    # post to elasticsearch servers directly without going through
    # message queues in case there is an availability issue
    es.save_event(index=options.index, doc_type='mozdefhealth', body=json.dumps(healthlog))
    # post another doc with a static docid and tag
    # for use when querying for the latest sqs status
    healthlog['tags'] = ['mozdef', 'status', 'sqs-latest']
    es.save_event(index=options.index, doc_type='mozdefhealth', doc_id=getDocID(sqsid), body=json.dumps(healthlog))
Developer: IFGHou, Project: MozDef, Lines of code: 73, Source file: sqs_queue_status.py
Example 18: searchMongoAlerts
def searchMongoAlerts(mozdefdb):
    attackers = mozdefdb['attackers']
    alerts = mozdefdb['alerts']
    # search the last X alerts for IP addresses
    # aggregated by CIDR mask/24

    # aggregate IPv4 addresses in the most recent alerts
    # to find common attackers.
    ipv4TopHits = alerts.aggregate([
        # reverse sort the current alerts
        {"$sort": {"utcepoch": -1}},
        # most recent 100
        {"$limit": 100},
        # must have an ip address
        {"$match": {"events.documentsource.details.sourceipaddress": {"$exists": True}}},
        # must not be already related to an attacker
        {"$match": {"attackerid": {"$exists": False}}},
        # make each event into its own doc
        {"$unwind": "$events"},
        {"$project": {
            "_id": 0,
            # emit the source ip only
            "sourceip": "$events.documentsource.details.sourceipaddress"
        }},
        # count by ip
        {"$group": {"_id": "$sourceip", "hitcount": {"$sum": 1}}},
        # limit to those with X observances
        {"$match": {"hitcount": {"$gt": options.ipv4attackerhitcount}}},
        # sort
        {"$sort": SON([("hitcount", -1), ("_id", -1)])},
        # top 10
        {"$limit": 10}
    ])
    for ip in ipv4TopHits:
        # sanity check ip['_id'] which should be the ipv4 address
        if isIPv4(ip['_id']) and ip['_id'] not in netaddr.IPSet(['0.0.0.0']):
            ipcidr = netaddr.IPNetwork(ip['_id'])
            # set CIDR
            # todo: lookup ipwhois for asn_cidr value
            # potentially with a max mask value (i.e. asn is /8, limit attackers to /24)
            ipcidr.prefixlen = options.ipv4attackerprefixlength

            # append to or create attacker.
            # does this match an existing attacker's indicators
            if not ipcidr.ip.is_loopback() and not ipcidr.ip.is_private() and not ipcidr.ip.is_reserved():
                logger.debug('Searching for existing attacker with ip ' + str(ipcidr))
                attacker = attackers.find_one({'indicators.ipv4address': str(ipcidr)})

                if attacker is None:
                    logger.debug('Attacker not found, creating new one')
                    # new attacker
                    # generate a meteor-compatible ID
                    # save the ES document type, index, id
                    newAttacker = genNewAttacker()

                    # str to get the ip/cidr rather than netblock cidr.
                    # i.e. '1.2.3.4/24' not '1.2.3.0/24'
                    newAttacker['indicators'].append(dict(ipv4address=str(ipcidr)))
                    matchingalerts = alerts.find(
                        {"events.documentsource.details.sourceipaddress":
                         str(ipcidr.ip),
                         })
                    total_events = 0
                    if matchingalerts is not None:
                        # update list of alerts this attacker matched.
                        for alert in matchingalerts:
                            newAttacker['alerts'].append(
                                dict(alertid=alert['_id'])
                            )
                            # update alert with attackerID
                            alert['attackerid'] = newAttacker['_id']
                            alerts.save(alert)

                            total_events += len(alert['events'])
                            if len(alert['events']) > 0:
                                newAttacker['lastseentimestamp'] = toUTC(alert['events'][-1]['documentsource']['utctimestamp'])
                    newAttacker['alertscount'] = len(newAttacker['alerts'])
                    newAttacker['eventscount'] = total_events
                    attackers.insert(newAttacker)
                    # update geoIP info
                    latestGeoIP = [a['events'] for a in alerts.find(
                        {"events.documentsource.details.sourceipaddress":
                         str(ipcidr.ip),
                         })][-1][0]['documentsource']
                    updateAttackerGeoIP(mozdefdb, newAttacker['_id'], latestGeoIP)

                    if options.broadcastattackers:
                        broadcastAttacker(newAttacker)
                else:
                    logger.debug('Found existing attacker')
                    # if alert not present in this attackers list
                    # append this to the list
                    # todo: trim the list at X (i.e. last 100)
                    # search alerts without attackerid
                    matchingalerts = alerts.find(
                        {"events.documentsource.details.sourceipaddress":
                         str(ipcidr.ip),
                         "attackerid": {"$exists": False}
                         })
#......... the rest of this code has been omitted .........
Developer: mozilla, Project: MozDef, Lines of code: 101, Source file: collectAttackers.py
Example 19: test_eve_log_alert_http
def test_eve_log_alert_http(self):
    event = {
        'customendpoint': '',
        'category': 'suricata',
        'source': 'eve-log',
        'event_type': 'alert'
    }
    MESSAGE = {
        "timestamp":"2018-09-12T22:24:09.546736+0000",
        "flow_id":1484802709084080,
        "in_iface":"enp216s0f0",
        "event_type":"alert",
        "vlan":75,
        "src_ip":"10.48.240.19",
        "src_port":44741,
        "dest_ip":"10.48.74.17",
        "dest_port":3128,
        "proto":"017",
        "alert":{
            "action":"allowed",
            "gid":1,
            "signature_id":2024897,
            "rev":1,
            "signature":"ET USER_AGENTS Go HTTP Client User-Agent",
            "category":"",
            "severity":3
        },
        "app_proto":"http",
        "flow":{
            "pkts_toserver":555,
            "pkts_toclient":20,
            "bytes_toserver":350,
            "bytes_toclient":4444,
            "start":"2018-10-12T22:24:09.546736+0000"
        },
        "payload":"Q09OTkVDVCBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzIEhUVFAvMS4xDQpIb3N0OiBzZWN1cml0eS10cmFja2VyLmRlYmlhbi5vcmc6NDQzDQpVc2VyLUFnZW50OiBHby1odHRwLWNsaWVudC8xLjENCg0K",
        "payload_printable":"CONNECT security-tracker.debian.org:443 HTTP\/1.1\r\nHost: security-tracker.debian.org:443\r\nUser-Agent: Go-http-client\/1.1\r\n\r\n",
        "stream":0,
        "packet":"RQAAKAAAAABABgAACjBLMAowShHR6Aw4ClEmlrx\/mcdQEgoAAAAAAA==",
        "packet_info":{
            "linktype":12
        },
        "http": {
            "hostname":"security-tracker.debian.org",
            "url":"security-tracker.debian.org:443",
            "http_user_agent":"Go-http-client\/1.1",
            "http_method":"CONNECT",
            "protocol":"HTTP\/1.1",
            "status":200,
            "length":0,
            "redirect":"afakedestination"
        },
    }
    event['message'] = json.dumps(MESSAGE)

    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    assert toUTC(MESSAGE['flow']['start']).isoformat() == result['utctimestamp']
    assert toUTC(MESSAGE['flow']['start']).isoformat() == result['timestamp']
    assert result['details']['host'] == MESSAGE['http']['hostname']
    assert result['details']['method'] == MESSAGE['http']['http_method']
    assert result['details']['user_agent'] == MESSAGE['http']['http_user_agent']
    assert result['details']['status_code'] == MESSAGE['http']['status']
    assert result['details']['uri'] == MESSAGE['http']['url']
    assert result['details']['redirect_dst'] == MESSAGE['http']['redirect']
    assert result['details']['request_body_len'] == MESSAGE['http']['length']
Developer: mozilla, Project: MozDef, Lines of code: 67, Source file: test_suricataFixup.py
Example 20: main
def main():
    if options.output == 'syslog':
        logger.addHandler(SysLogHandler(address=(options.sysloghostname, options.syslogport)))
    else:
        sh = logging.StreamHandler(sys.stderr)
        sh.setFormatter(formatter)
        logger.addHandler(sh)
    logger.debug('started')

    state = State(options.state_file_name)
    try:
        # capture the time we start running so next time we catch any events
        # created while we run.
        lastrun = toUTC(datetime.now()).isoformat()

        # get our credentials
        mozdefClient = json.loads(open(options.jsoncredentialfile).read())
        client_email = mozdefClient['client_email']
        private_key = mozdefClient['private_key']

        # set the oauth scope we will request
        scope = [
            'https://www.googleapis.com/auth/admin.reports.audit.readonly',
            'https://www.googleapis.com/auth/admin.reports.usage.readonly'
        ]

        # authorize our http object
        # we do this as a 'service account' so it's important
        # to specify the correct 'sub' option
        # or you will get access denied even with correct delegations/scope
        credentials = SignedJwtAssertionCredentials(client_email,
                                                    private_key,
                                                    scope=scope,
                                                    sub=options.impersonate)
        http = Http()
        credentials.authorize(http)

        # build a request to the admin sdk
        api = build('admin', 'reports_v1', http=http)
        response = api.activities().list(userKey='all',
                                         applicationName='login',
                                         startTime=toUTC(state.data['lastrun']).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
                                         maxResults=options.recordlimit).execute()

        # fix up the event craziness to a flatter format
        events = []
        if 'items' in response:
            for i in response['items']:
                # flatten the sub dict/lists to pull out the good parts
                event = dict(category='google')
                event['tags'] = ['google', 'authentication']
                event['severity'] = 'INFO'
                event['summary'] = 'google authentication: '

                details = dict()
                for keyValue in flattenDict(i):
                    # change key/values like:
                    # [email protected]
                    # to actor_email=value
                    key, value = keyValue.split('=')
                    key = key.replace('.', '_').lower()
                    details[key] = value

                # find important keys
                # and adjust their location/name
                if 'ipaddress' in details:
                    # it's the source ip
                    details['sourceipaddress'] = details['ipaddress']
                    del details['ipaddress']

                if 'id_time' in details:
                    event['timestamp'] = details['id_time']
                    event['utctimestamp'] = details['id_time']
                if 'events_name' in details:
                    event['summary'] += details['events_name'] + ' '
                if 'actor_email' in details:
                    event['summary'] += details['actor_email'] + ' '

                event['details'] = details
                events.append(event)

        # post events to mozdef
        logger.debug('posting {0} google events to mozdef'.format(len(events)))
        for e in events:
            requests.post(options.url, data=json.dumps(e))

        # record the time we started as
        # the start time for next time.
        state.data['lastrun'] = lastrun
        state.write_state_file()
    except Exception as e:
        logger.error("Unhandled exception, terminating: %r" % e)
Developer: IFGHou, Project: MozDef, Lines of code: 94, Source file: google2mozdef.py
Note: the mozdef_util.utilities.toUTC.toUTC examples in this article were compiled by 纯净天空 from source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors. When redistributing or reusing the code, please follow the license of the corresponding project; reproduction without permission is prohibited.