khulnasoft commited on
Commit
1c4000e
1 Parent(s): 8663f2a

Create scripts/data-shippers/Mordor-Elastic.py

Browse files
scripts/data-shippers/Mordor-Elastic.py ADDED
@@ -0,0 +1,185 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/usr/bin/env python3
"""Import Mordor datasets (gzipped tarballs of JSON event lines) into an
Elasticsearch index or a Logstash HTTP input.

Each member file of each ``*.tar.gz`` archive is expected to contain one JSON
event per line. Events are normalized toward the Winlogbeat >= 7 schema
(``winlog.event_id``, ``winlog.channel``, ``winlog.computer_name``,
``winlog.event_data``) before shipping.
"""

from argparse import ArgumentParser
from pathlib import Path
import tarfile
import json
import progressbar
import sys
from termcolor import colored

# Keep progress bars and print() output from interleaving badly.
progressbar.streams.wrap_stdout()

argparser = ArgumentParser(description="Import datasets into Elasticsearch or Logstash")
argparser.add_argument("--output", "-o", default="elasticsearch", help="Choose Elasticsearch or Logstash as output")
argparser.add_argument("--recursive", "-r", action="store_true", help="Recurse into directories")
argparser.add_argument("--url", "-u", default="http://localhost:9200", help="URL of Elasticsearch instance (%(default)s) or Logstash")
argparser.add_argument("--cacerts", "-c", default=None, help="Path to CA certificates for TLS verification")
argparser.add_argument("--insecure", "-I", default=True, action="store_false", dest="verify_certs", help="Don't verify TLS certificates.")
argparser.add_argument("--index", "-i", default="winlogbeat-mordor", help="Target index for data import (%(default)s)")
argparser.add_argument("--no-index-creation", "-n", action="store_false", dest="create_index", help="Don't create index.")
argparser.add_argument("inputs", nargs="+", type=Path, help="Path to dataset")
args = argparser.parse_args()

if args.output == "elasticsearch":
    # Only import ES module when required
    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk

    print("Initializing Elasticsearch connection and index...")
    index = args.index
    es = Elasticsearch(
        [args.url],
        ca_certs=args.cacerts,
        verify_certs=args.verify_certs,
    )
    if args.create_index:
        # Mordor events are wide; raise the default field limit (1000) to 2000.
        es.indices.create(
            index,
            body={
                "settings": {
                    "index.mapping.total_fields.limit": 2000
                }
            }
        )
elif args.output == "logstash":
    # Only import requests when logstash is used
    import requests

    print("Initializing Logstash connection...")
    logstash_url = args.url
    # requests' `verify` parameter: a CA bundle path, True, or False.
    if args.verify_certs and args.cacerts:
        verify_certs = args.cacerts
    elif not args.verify_certs:
        from urllib3.exceptions import InsecureRequestWarning
        requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
        verify_certs = False
    else:
        verify_certs = True
else:
    print("Output type was not recognized. Exiting...")
    sys.exit(1)  # signal failure to the calling shell

# Collect input archives: with --recursive, inputs are directories searched
# for *.tar.gz; otherwise each input must itself be a file.
if args.recursive:
    paths = [ p for path in args.inputs for p in path.glob("**/*.tar.gz") if p.is_file() ]
else:
    paths = [ path for path in args.inputs if path.is_file() ]

# Sum uncompressed member sizes so the transfer bar has a meaningful maximum.
print("Calculating total file size...")
total_size = 0
for path in progressbar.progressbar(paths):
    with tarfile.open(path) as tf:
        total_size += sum(member.size for member in tf.getmembers() if member.isfile())

total_success = 0
total_failed = 0

with progressbar.DataTransferBar(max_value=total_size) as progress:
    for path in paths:
        print(f"Importing dataset {path}")
        with tarfile.open(path) as tf:
            for m in tf.getmembers():
                if m.isfile():
                    print(f"- Importing member file {m.name}...")
                    logfile = f"{path}/{m.name}"
                    mf = tf.extractfile(m)

                    def generate_actions(f, progress):
                        """Yield normalized events from file object *f*, updating *progress*.

                        Yields bulk actions ({"_index", "_source"}) for Elasticsearch
                        or bare event dicts for Logstash.
                        """
                        for line in f:
                            source = json.loads(line)
                            # Record which archive member the event came from.
                            source["log"] = { "file": { "name": logfile }}
                            source.setdefault("winlog", dict())

                            # Plain data created by nxlog is completely moved to winlog.event_data except blacklisted
                            if "EventID" in source:
                                # Move event id to appropriate location
                                source["winlog"]["event_id"] = source["EventID"]
                                del source["EventID"]

                            # Discard unneeded fields
                            try:
                                del source["type"]
                            except KeyError:
                                pass

                            try:
                                del source["host"]
                            except KeyError:
                                pass

                            # Move fields from top level to winlog.event_data
                            source["winlog"]["event_data"] = {
                                k: v
                                for k, v in source.items()
                                if k not in ("winlog", "log", "Channel", "Hostname", "@timestamp", "@version")
                            }
                            for k in source["winlog"]["event_data"].keys():
                                del source[k]

                            # Special handling for host name
                            try:
                                source["winlog"]["computer_name"] = source["Hostname"]
                                del source["Hostname"]
                            except KeyError:
                                pass

                            # Special handling for channel
                            try:
                                source["winlog"]["channel"] = source["Channel"]
                                del source["Channel"]
                            except KeyError:
                                pass

                            # Data created with Winlogbeat <7 contains event fields in event_data instead of winlog.event_data - move it
                            # NOTE(review): the blacklist sweep above already moved any
                            # top-level "event_data"/"log_name"/"event_id" keys into
                            # winlog.event_data, so these three fallbacks look
                            # unreachable — TODO confirm against real <7 datasets.
                            if "event_data" in source:
                                source["winlog"]["event_data"] = source["event_data"]
                                del source["event_data"]
                            # Old Winlogbeats also put the channel name in the log_name field move this to new field names
                            if "log_name" in source:
                                source["winlog"]["channel"] = source["log_name"]
                                del source["log_name"]
                            # Some log records contain the channel name "security" in small letters, fix this
                            try:
                                if source["winlog"]["channel"] == "security":
                                    source["winlog"]["channel"] = "Security"
                            except KeyError:
                                pass
                            # Old Winlogbeats also put the event id in a different location, move it to the new one
                            if "event_id" in source:
                                source["winlog"]["event_id"] = source["event_id"]
                                del source["event_id"]
                            # Also set event.code to event id
                            source.setdefault("event", dict())["code"] = source["winlog"]["event_id"]

                            # Advance the transfer bar by the raw bytes consumed.
                            progress.update(progress.value + len(line))
                            if args.output == "elasticsearch":
                                yield {
                                    "_index": index,
                                    "_source": source
                                }
                            elif args.output == "logstash":
                                yield source

                    if args.output == "elasticsearch":
                        # stats_only=True makes bulk() return (success, failed) counts.
                        success_count, fail_count = bulk(es, generate_actions(mf, progress), True, raise_on_error=False)
                        total_success += success_count
                        total_failed += fail_count
                        color = "red" if fail_count > 0 else "green"
                    elif args.output == "logstash":
                        # Ship one event per HTTP POST to the Logstash HTTP input.
                        fail_count = 0
                        success_count = 0
                        for event in generate_actions(mf, progress):
                            r = requests.post(logstash_url, json=event, verify=verify_certs)
                            if r.status_code == 200:
                                success_count += 1
                                total_success += 1
                                color = "green"
                            else:
                                fail_count += 1
                                total_failed += 1
                                color = "red"
                    print(colored(f"- Imported {success_count} events, {fail_count} failed", color))
print(f"Imported {total_success} log records, {total_failed} failed.")