Skip to content

Instantly share code, notes, and snippets.

@mark-hallman
Last active June 18, 2020 17:07
Show Gist options
  • Select an option

  • Save mark-hallman/1bd03cae1dca1b9ddebda9195e394a12 to your computer and use it in GitHub Desktop.

Select an option

Save mark-hallman/1bd03cae1dca1b9ddebda9195e394a12 to your computer and use it in GitHub Desktop.
logstash config file for Plaso psort l2tcsv output format
# Please check https://github.com/cvandeplas/ELK-forensics for more information.
# Created by Christophe Vandeplas <christophe@vandeplas.com>
# Import a Plaso psort l2tcsv output file to your Elasticsearch database.
#
# Do note that Plaso can export directly to Elasticsearch.
# Have a look at the plaso.conf file in this repository.
#
# To generate the l2tcsv file using psort's CSV output:
# - first generate the dump file with 'log2timeline.py' from Plaso
# - then use 'psort.py' to output to csv
# - transfer the csv to logstash
# Example:
# - log2timeline.py win7-64-nfury-10.3.58.6.dump win7-64-nfury-c-drive/win7-64-nfury-c-drive.E01
# - psort.py -o l2tcsv win7-64-nfury-10.3.58.6.dump > win7-64-nfury-10.3.58.6.csv
# - cat win7-64-nfury-10.3.58.6.csv | nc 127.0.0.1 18005
# - Changes made by Mark Hallman 2019-03-20
# - changed input to file from tcp
# input {
# tcp {
# type => "srl2.0-timelines"
# port => 18005
# }
# }
input {
  # Read the Plaso/psort l2tcsv supertimeline from a local file
  # (replaces the retired tcp listener above).
  file {
    path => "/usr/share/logstash/data/base-rd-01-supertimeline.csv"
    type => "srl2.0-timelines"
    # Re-read the file from the top on every run: start at the beginning
    # and discard read-position state by pointing sincedb at /dev/null.
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
# Only process events tagged by the file input above.
if [type] == "srl2.0-timelines" {
# Parse the l2tcsv line into named columns.
csv {
separator => ","
quote_char => "ª" # workaround: don't use a quote character as " gives issues if the field contains a "
columns => ["date","time","timezone","macb","source","sourcetype","eventtype","user","host","short","desc","version","filename","inode","notes","format","extra"]
}
# The first CSV row is the header; its "date" column literally says "date".
if [date] == "date" {
drop {} # drop the first line that contains the column names
}
# Build a single "MM/dd/YYYY HH:mm:ss TZ" string in [date] so the date
# filter below can parse it. Order matters: each merge appends to the
# array, then join flattens it with spaces.
mutate { merge => ["date", "time"] } # merge and join need to be in separate mutates
mutate { merge => ["date", "timezone"] } # merge and join need to be in separate mutates
mutate { join => ["date", " "] } # merge and join need to be in separate mutates
# Set @timestamp from the combined date string ("z" parses the timezone name).
date {
match => ["date", "MM/dd/YYYY HH:mm:ss z" ]
}
# extract macb info: turn the MACB flag string (e.g. "M.C.") into tags.
if ("M" in [macb]) { mutate { add_tag => ["modified"] } }
if ("A" in [macb]) { mutate { add_tag => ["accessed"] } }
if ("C" in [macb]) { mutate { add_tag => ["changed"] } }
if ("B" in [macb]) { mutate { add_tag => ["birth"] } }
# Extract filenames: chained groks feed each capture into the next
# (break_on_match false), splitting desc into path -> filename -> extension.
if [source] == "FILE" {
grok {
break_on_match => false
match => ["desc", "(:(?<extracted.path>/.*?))?$",
"extracted.path", "(?<extracted.filename>[^/]+?)?$",
"extracted.filename", "((\.(?<extracted.ext>[^./]+))?)?$"
]
}
}
# Same path/filename/extension extraction for META events, but the
# source field is [filename] rather than [desc].
if [source] == "META" {
grok {
break_on_match => false
match => ["filename", "(:(?<extracted.path>/.*?))?$",
"extracted.path", "(?<extracted.filename>[^/]+?)?$",
"extracted.filename", "((\.(?<extracted.ext>[^./]+))?)?$"
]
}
}
# Extract urls from browser-history events ("Location: <url>" in desc).
if [source] == "WEBHIST" {
grok { match => ["desc", "Location: (?<extracted.url>.*?)[ $]"] }
}
# Final cleanup: coerce numeric fields, normalize the extension, and
# drop fields already folded into @timestamp or otherwise redundant.
mutate {
convert => ["inode", "integer",
"version", "integer"]
lowercase => ["extracted.ext"]
remove_field => ["message", "short", "date", "time", "timezone"]
}
}
}
# output {
# stdout {
# codec => rubydebug
# }
# }
output {
  # Ship parsed timeline events to Elasticsearch.
  if [type] == "srl2.0-timelines" {
    elasticsearch {
      index => "logsttimelines"
      # Fix: `hosts => localhost` is a bare word, which is not valid
      # Logstash config syntax — hosts takes a quoted string or an array
      # of "host[:port]" strings.
      hosts => ["localhost"]
    }
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment