[dalton]
# debug logging
DEBUG = False
# location for job .zip files on disk. Not deleted by Dalton; use
# something like a cron job to prune old files if necessary
job_path = /opt/dalton/jobs
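# as an illustration (not something Dalton ships), a crontab entry like
# the following would prune job zips older than 30 days; the schedule and
# retention are assumptions to adjust for your environment:
#   0 3 * * * find /opt/dalton/jobs -type f -name '*.zip' -mtime +30 -delete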
# root directory for all the rulesets Dalton knows about.
# inside this directory there should be a 'suricata' and
# a 'snort' directory in which the corresponding rules go.
# it is not recommended that you change this
ruleset_path = /opt/dalton/rulesets
# temp storage for pcaps, configs, etc. Should be cleaned up by Dalton
# and everything in here can be deleted after the job is submitted
temp_path = /tmp/dalton
# IP/host of redis server; dalton_redis is the (resolvable)
# hostname of the redis docker container
redis_host = dalton_redis
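# assuming the default redis port (6379), the controller would reach
# redis at dalton_redis:6379; change this value if redis runs elsewhere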
# location of configuration files for the sundry supported engines
# it is recommended that you NOT change this since Flask looks in
# the static directory to serve the config files.
engine_conf_path = /opt/dalton/app/static/engine-configs
# timeout (minutes) for keys to expire in Redis for regular (non-teapot)
# submitted jobs (7200 minutes == 120 hours == 5 days)
redis_expire = 7200
# timeout (minutes) for keys to expire in Redis for teapot
# jobs (short and stout jobs that are short lived)
teapot_redis_expire = 62
# timeout (minutes) before a share link expires and job info is no longer preserved
# the zip file containing the job gets its modification date pushed into the future to avoid deletion (43200 minutes == 30 days)
# the file mod date is set to now + (share_expire - redis_expire) so the usual redis_expire check will match when expected
share_expire = 43200
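# worked example with the defaults above: the mod date becomes
# now + (43200 - 7200) = now + 36000 minutes (25 days); the usual
# redis_expire check then removes the zip 7200 minutes (5 days) after
# that mod date, i.e. 30 days (share_expire) from now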
# time (seconds) for jobs to run before setting timeout status
job_run_timeout = 3600
# if a Dalton Agent has not checked into the controller in this many
# minutes, clear it from the sensor list (which prevents jobs from
# being submitted to that queue if there are no other sensors with
# that engine (technology) and version)
agent_purge_time = 20
# API Keys valid for Dalton Agents (currently not used)
api_keys = bmV2ZXIgdW5kZXJlc3RpbWF0ZSB5b3VyIG9wcG9uZW50,ZXhwZWN0IHRoZSB1bmV4cGVjdGVk,dGFrZSBpdCBvdXRzaWRl,UGFpbiBkb24ndCBodXJ0
# location of mergecap binary; needed to combine multiple pcaps for Suricata jobs
mergecap_binary = /usr/bin/mergecap
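# for illustration (not necessarily the exact command Dalton builds),
# mergecap writes a combined capture with -w; the filenames here are
# placeholders:
#   /usr/bin/mergecap -w combined.pcap first.pcap second.pcap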
# program that reads the unified2 binary files and outputs text
u2_analyzer = python3 -m idstools.scripts.u2spewfoo
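# a manual run against a unified2 file might look like this (the
# filename is illustrative):
#   python3 -m idstools.scripts.u2spewfoo /var/log/snort/unified2.log.1234567890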
# if no rulesets exist on startup, Dalton will use the rulecat script
# (from py-idstools) to download some rulesets
rulecat_script = python3 -m idstools.scripts.rulecat
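# a comparable manual run might look like this (flags per py-idstools'
# rulecat; the URL and output directory are illustrative):
#   python3 -m idstools.scripts.rulecat --url https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz --output /opt/dalton/rulesets/suricata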
# maximum number of pcap files that can be uploaded per job; enforced
# both in the job submission UI and when Dalton processes the pcaps
max_pcap_files = 10
#############
# Flowsynth #
#############
[flowsynth-web]
# path to flowsynth script for web gui
# by default, use installed flowsynth module
bin_path = python3 -m flowsynth
# where to store PCAPs temporarily; should be considered public
pcap_path = /tmp/pcaps
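# a standalone invocation under the settings above might look like this
# (the input file, -f/-w flags, and output name are illustrative; check
# the flowsynth CLI help for the exact options):
#   python3 -m flowsynth my_flows.synth -f pcap -w /tmp/pcaps/my_flows.pcap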