# Enable or disable the available reporting modules [on/off].
# If you add a custom reporting module to your Cuckoo setup, you have to add
# a dedicated entry in this file, or it won't be executed.
# You can also add additional options under the section of your module and
# they will be available in your Python class.
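# For illustration only (the module and option names below are hypothetical,
# not part of the stock configuration): a custom reporting module named
# "mycustomreport" would need a dedicated section such as:
# [mycustomreport]
# enabled = no
# # any extra options placed here become available to the module's Python class
# some_option = some_value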
[jsondump]
enabled = yes
# use the C-optimized JSON encoder; requires the entire JSON result to fit in memory
ram_boost = yes
indent = 4
encoding = latin-1

[reporthtml]
# required for the WSGI interface
enabled = no

[reporthtmlsummary]
# much smaller, faster report generation, omits API logs and is non-interactive
enabled = yes

[reportpdf]
# Note that this requires reporthtmlsummary to be enabled above as well
enabled = yes

[mmdef]
enabled = no

[maec41]
enabled = no
mode = overview
processtree = true
output_handles = false
static = true
strings = true
virustotal = true
deduplicate = true

[mongodb]
enabled = yes
host = 127.0.0.1
port = 27017
db = cuckoo
# Automatically delete large dict values that exceed MongoDB's 16MB document size limit
# Note: This only deletes dict keys from data stored in MongoDB. You would
# still get the full dataset if you parsed the results dict in another
# reporting module or from the jsondump module.
fix_large_docs = yes

# Use Elasticsearch as the "database" which powers Django.
# NOTE: If this is enabled, MongoDB should not be enabled, unless the
# searchonly option is set to yes; in that case Elasticsearch is only used
# for the /search web page.
[elasticsearchdb]
enabled = no
searchonly = no
host = 127.0.0.1
port = 9200
# The report data is indexed in the form of {{index-yyyy.mm.dd}}
# so the below index configuration option is actually an index 'prefix'.
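# For example, with index = cuckoo, a report indexed on 2019.02.13 would
# presumably end up in an index named cuckoo-2019.02.13 (the date here is
# illustrative).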
index = cuckoo

[retention]
enabled = no
# run at most once every this many hours (unless reporting.conf is modified)
run_every = 6
# The number of days a task must be old before its data is deleted
# Set a value to no to never delete that data
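# For example, with the values below, pcap files are removed once a task is
# older than 62 days, while elastic = no and malheur = no mean that data is
# never deleted.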
memory = 14
procmemory = 62
pcap = 62
sortedpcap = 14
bsonlogs = 62
dropped = 62
screencaps = 62
reports = 62
mongo = 731
elastic = no
malheur = no

[syslog]
enabled = no
# IP of your syslog server/listener
host = x.x.x.x
# Port of your syslog server/listener
port = 514
# Protocol to send data over
protocol = tcp
# Store a logfile? [in reports directory]
logfile = yes
# if yes, what logname? [Default: syslog.txt]
logname = syslog.log

[moloch]
enabled = no
base = https://172.18.100.105:8005/
node = cuckoo3
capture = /data/moloch/bin/moloch-capture
captureconf = /data/moloch/etc/config.ini
user = admin
pass = admin
realm = Moloch

[resubmitexe]
enabled = yes
resublimit = 2

[malheur]
enabled = yes
maxsimilar = 20

[compression]
enabled = yes
zipmemdump = yes
zipmemstrings = yes
zipprocdump = yes
zipprocstrings = yes

[misp]
enabled = no
apikey =
url =
# minimal malscore required before reporting to MISP; the default of 0 reports all
min_malscore = 0
# by default 5 threads
threads =
# this will retrieve information for iocs
# and activate misp report download from webgui
extend_context = yes
# upload iocs from cuckoo to MISP
upload_iocs = yes
distribution = 0
threat_level_id = 2
analysis = 2
# The analysis ID will be appended to the title below; change it as needed
title = Iocs from cuckoo analysis:
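# For example, an analysis with ID 42 would presumably produce an event titled
# "Iocs from cuckoo analysis: 42" (the ID value here is illustrative).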
# Sections to report
network = yes
ids_files = yes
dropped = yes
registry = yes
mutexes = yes

[distributed]
enabled = no
remove_task_on_slave = no
# distributed cuckoo database, used to store node and task info
db = sqlite:///dist.db
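# The value above looks like an SQLAlchemy-style connection URL, so a
# server-backed database should also work here, e.g. (illustrative credentials):
# db = postgresql://cuckoo:password@127.0.0.1/dist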
# directory in which samples are stored until analysis is finished;
# samples will be moved to storage/binaries once analysis is finished
samples_directory = /tmp/distributed_cuckoo_samples
# number of failed attempts before a node is declared dead and deactivated
dead_count = 5
# number of threads which will retrieve files such as memdumps and pcaps in the background
retriever_threads = 4