#!/usr/bin/env python3
# Import datasets (tar.gz archives of JSON event records) into an Elasticsearch or Logstash instance
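#
# Example invocations (script name, dataset paths, and the Logstash port are placeholders):
#   python3 import-dataset.py --recursive datasets/
#   python3 import-dataset.py -o logstash -u http://localhost:8080 datasets/some-dataset.tar.gz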

from argparse import ArgumentParser
from pathlib import Path
import tarfile
import json
import progressbar
import sys
from termcolor import colored

# Wrap stdout so regular print() output does not break the progress bars
progressbar.streams.wrap_stdout()

argparser = ArgumentParser(description="Import datasets into Elasticsearch or Logstash")
argparser.add_argument("--output", "-o", default="elasticsearch", help="Choose Elasticsearch or Logstash as output")
argparser.add_argument("--recursive", "-r", action="store_true", help="Recurse into directories")
argparser.add_argument("--url", "-u", default="http://localhost:9200", help="URL of Elasticsearch instance (%(default)s) or Logstash")
argparser.add_argument("--cacerts", "-c", default=None, help="Path to CA certificates for TLS verification")
argparser.add_argument("--insecure", "-I", default=True, action="store_false", dest="verify_certs", help="Don't verify TLS cerificates.")
argparser.add_argument("--index", "-i", default="winlogbeat-mordor", help="Target index for data import (%(default)s)")
argparser.add_argument("--no-index-creation", "-n", action="store_false", dest="create_index", help="Don't create index.")
argparser.add_argument("inputs", nargs="+", type=Path, help="Path to dataset")
args = argparser.parse_args()

if args.output == "elasticsearch":
    # Only import the Elasticsearch module when it is actually required
    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk

    print("Initializing Elasticsearch connection and index...")
    index = args.index
    es = Elasticsearch(
            [args.url],
            ca_certs=args.cacerts,
            verify_certs=args.verify_certs,
            )
    if args.create_index:
        # Flattened Windows event data easily exceeds the default limit of 1000 mapped fields
        es.indices.create(
            index,
            body={
                "settings": {
                    "index.mapping.total_fields.limit": 2000
                }
            },
        )
elif args.output == "logstash":
    # Only import requests when Logstash is used
    import requests

    print("Initializing Logstash connection...")
    logstash_url = args.url
    if args.verify_certs and args.cacerts:
        verify_certs = args.cacerts
    elif not args.verify_certs:
        from urllib3.exceptions import InsecureRequestWarning
        requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
        verify_certs = False
    else:
        verify_certs = True
else:
    print("Output type was not recognized. Exiting...")
    sys.exit(1)

if args.recursive:
    paths = [ p for path in args.inputs for p in path.glob("**/*.tar.gz") if p.is_file() ]
else:
    paths = [ path for path in args.inputs if path.is_file() ]

print("Calulating total file size...")
total_size = sum(
    member.size
    for path in progressbar.progressbar(paths)
    for member in tarfile.open(path).getmembers() if member.isfile()
    )

total_success = 0
total_failed = 0

with progressbar.DataTransferBar(max_value=total_size) as progress:
    for path in paths:
        print(f"Importing dataset {path}")
        tf = tarfile.open(path)
        for m in tf.getmembers():
            if m.isfile():
                print(f"- Importing member file {m.name}...")
                logfile = f"{path}/{m.name}"
                mf = tf.extractfile(m)
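                # Generator that normalizes each JSON line into a Winlogbeat-style document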
                def generate_actions(f, progress):
                    for line in f:
                        source = json.loads(line)
                        source["log"] = { "file": { "name": logfile }}
                        source.setdefault("winlog", dict())

                        # Flat records created by nxlog: move all top-level fields into winlog.event_data, except a blacklist of metadata fields
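                        # Illustrative example (field names are representative, not exhaustive):
                        #   {"EventID": 1, "Hostname": "PC01", "Channel": "Security", "Image": "C:\\x.exe"}
                        # becomes
                        #   {"winlog": {"event_id": 1, "computer_name": "PC01", "channel": "Security",
                        #               "event_data": {"Image": "C:\\x.exe"}}}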
                        if "EventID" in source:
                            # Move event id to appropriate location
                            source["winlog"]["event_id"] = source["EventID"]
                            del source["EventID"]

                            # Discard unneeded fields
                            try:
                                del source["type"]
                            except KeyError:
                                pass

                            try:
                                del source["host"]
                            except KeyError:
                                pass

                            # Move fields from top level to winlog.event_data
                            source["winlog"]["event_data"] = {
                                        k: v
                                        for k, v in source.items()
                                        if k not in ("winlog", "log", "Channel", "Hostname", "@timestamp", "@version")
                                    }
                            for k in source["winlog"]["event_data"].keys():
                                del source[k]

                            # Special handling for host name
                            try:
                                source["winlog"]["computer_name"] = source["Hostname"]
                                del source["Hostname"]
                            except KeyError:
                                pass

                            # Special handling for channel
                            try:
                                source["winlog"]["channel"] = source["Channel"]
                                del source["Channel"]
                            except KeyError:
                                pass

                        # Data created with Winlogbeat <7 contains event fields in event_data instead of winlog.event_data - move it
                        if "event_data" in source:
                            source["winlog"]["event_data"] = source["event_data"]
                            del source["event_data"]
                        # Old Winlogbeat versions also put the channel name in the log_name field; move it to the new field name
                        if "log_name" in source:
                            source["winlog"]["channel"] = source["log_name"]
                            del source["log_name"]
                        # Some log records contain the channel name "security" in lowercase; normalize it
                        try:
                            if source["winlog"]["channel"] == "security":
                                source["winlog"]["channel"] = "Security"
                        except KeyError:
                            pass
                        # Old Winlogbeat versions also put the event ID in a different location; move it to the new one
                        if "event_id" in source:
                            source["winlog"]["event_id"] = source["event_id"]
                            del source["event_id"]
                        # Also mirror the event ID into event.code, guarding against records that carry none
                        if "event_id" in source["winlog"]:
                            source.setdefault("event", dict())["code"] = source["winlog"]["event_id"]

                        # Advance the transfer bar by the raw length of the processed line
                        progress.update(progress.value + len(line))
                        if args.output == "elasticsearch":
                            yield {
                                    "_index": index,
                                    "_source": source
                                }
                        elif args.output == "logstash":
                            yield source
                if args.output == "elasticsearch":
                    success_count, fail_count = bulk(es, generate_actions(mf, progress), stats_only=True, raise_on_error=False)
                    total_success += success_count
                    total_failed += fail_count
                    if fail_count > 0:
                        color = "red"
                    else:
                        color = "green"
                elif args.output == "logstash":
                    fail_count = 0
                    success_count = 0
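                    # POST each event individually; assumes a Logstash http input plugin listening at the given URL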
                    for event in generate_actions(mf, progress):
                        r = requests.post(logstash_url, json=event, verify=verify_certs)
                        if r.status_code == 200:
                            success_count += 1
                            total_success += 1
                            color = "green"
                        else:
                            fail_count += 1
                            total_failed += 1
                            color = "red"
                print(colored(f"- Imported {success_count} events, {fail_count} failed", color))
        tf.close()
print(f"Imported {total_success} log records, {total_failed} failed.")