diff --git a/.ca.def b/.ca.def
index 1f816af..aa84d53 100755
--- a/.ca.def
+++ b/.ca.def
@@ -436,27 +436,21 @@ EG_DROP_CMD="$EG_DROP_CMD"
 # This aggregate list combines Harvesters, Spammers and SMTP Dictionary attacks
 # from the PHP IP Data at: http://www.projecthoneypot.org/list_of_ips.php
 DLIST_PHP="$DLIST_PHP"
-
-DLIST_PHP_URL="rfxn.com/downloads/php_list"
-DLIST_PHP_URL_PROT="http"
+DLIST_PHP_URL="http://rfxn.com/downloads/php_list"
 
 # The Spamhaus Don't Route Or Peer List (DROP) is an advisory "drop all
 # traffic" list, consisting of stolen 'zombie' netblocks and netblocks
 # controlled entirely by professional spammers. For more information please
 # see http://www.spamhaus.org/drop/.
 DLIST_SPAMHAUS="$DLIST_SPAMHAUS"
-
-DLIST_SPAMHAUS_URL="www.spamhaus.org/drop/drop.lasso"
-DLIST_SPAMHAUS_URL_PROT="http"
+DLIST_SPAMHAUS_URL="http://www.spamhaus.org/drop/drop.lasso"
 
 # DShield collects data about malicious activity from across the Internet.
 # This data is cataloged, summarized and can be used to discover trends in
 # activity, confirm widespread attacks, or assist in preparing better firewall
 # rules. This is a list of top networks that have exhibited suspicious activity.
 DLIST_DSHIELD="$DLIST_DSHIELD"
-
-DLIST_DSHIELD_URL="feeds.dshield.org/top10-2.txt"
-DLIST_DSHIELD_URL_PROT="http"
+DLIST_DSHIELD_URL="http://feeds.dshield.org/top10-2.txt"
 
 # The reserved networks list is addresses which ARIN has marked as reserved
 # for future assignement and have no business as valid traffic on the internet.
@@ -465,9 +459,7 @@ DLIST_DSHIELD_URL_PROT="http"
 # on the internet from getting blocked; this option is only important when
 # BLK_RESNET is set to enabled.
 DLIST_RESERVED="1"
-
-DLIST_RESERVED_URL="rfxn.com/downloads/reserved.networks"
-DLIST_RESERVED_URL_PROT="http"
+DLIST_RESERVED_URL="http://rfxn.com/downloads/reserved.networks"
 
 # ECN is an extension which helps reduce congestion. Unfortunately some
 # clueless software/hardware vendors have setup their sites or implemented
@@ -477,9 +469,7 @@ DLIST_RESERVED_URL_PROT="http"
 # is accepted as intended. This option is dependent on setting SYSCTL_ECN="1"
 # otherwise it stays disabled.
 DLIST_ECNSHAME="$DLIST_ECNSHAME"
-
-DLIST_ECNSHAME_URL="rfxn.com/downloads/ecnshame.lst"
-DLIST_ECNSHAME_URL_PROT="http"
+DLIST_ECNSHAME_URL="http://rfxn.com/downloads/ecnshame.lst"
 
 ##
 # Global Trust
@@ -491,11 +481,8 @@ DLIST_ECNSHAME_URL_PROT="http"
 # leaving USE_RGT=0, ideal for a host serving the files.
 USE_RGT="$USE_RGT"
 
-GA_URL="$GA_URL" # glob_allow.rules url (no *://)
-GA_URL_PROT="http" # protocol for use with wget
-
-GD_URL="$GD_URL" # glob_deny.rules url (no *://)
-GD_URL_PROT="http" # protocol for use with wget
+GA_URL="$GA_URL"
+GD_URL="$GD_URL"
 
 ##
 # [Logging and control settings]
diff --git a/CHANGELOG b/CHANGELOG
index 9f2fb08..4b61a2f 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -11,6 +11,7 @@
 - New versioning scheme will become consistent across all rfxn.com projects
 - The old versioning scheme had no real value and had become a never ending
   release tree
+[New] added locking support to prevent multiple start,stop,restart,refresh operations from running on top of each other
 [Change] replaced usage of ifconfig with ip command for determining interface addresses, preserved ifconfig support for older <=EL4 systems
 [Change] removed extras dshield package which was rarely utilized, users can of course still manually download it from dshield.org
 [Change] updates --refresh|-e to utilize new consolidated allow/deny functions and improve performance of refresh (reload) operations
@@ -39,6 +40,7 @@
 applied against loopback traffic if so desired
 [Change] consolidated TMP_DROP and TMP_ALLOW chains into REFRESH_TEMP
 [Change] updated copyright dates in all output and file headers
+[Change] removed use of *_URL_PROT variables, URL's should now be fully qualified URI's (e.g: http://domain.com/path/file)
 [Fix] trust rules refresh cronjob modified to remove MAILTO & SHELL variables which were causing crond 'bad minute' errors on some systems
 [Fix] reordered chain flushes on refresh() to avoid any possible packet loss or
 loss of connectivity
diff --git a/files/apf b/files/apf
index 401c936..7771fe3 100755
--- a/files/apf
+++ b/files/apf
@@ -11,11 +11,11 @@ VER="1.7.5"
 CNF="/etc/apf/conf.apf"
 
 head() {
-echo "Advanced Policy Firewall (APF) v$VER "
-echo " Copyright (C) 2002-2014, R-fx Networks "
-echo " Copyright (C) 2014, Ryan MacDonald "
-echo "This program may be freely redistributed under the terms of the GNU GPL"
-echo ""
+    echo "Advanced Policy Firewall (APF) v$VER "
+    echo " Copyright (C) 2002-2014, R-fx Networks "
+    echo " Copyright (C) 2014, Ryan MacDonald "
+    echo "This program may be freely redistributed under the terms of the GNU GPL"
+    echo ""
 }
 
 if [ -f "$CNF" ] && [ ! "$CNF" == "" ]; then
@@ -90,7 +90,7 @@ else
     fi
 fi
 
-# check uptiime is greater than 5 minutes (300s)
+# check uptime is greater than 5 minutes (300s)
 UPSEC=`cat /proc/uptime | tr '.' ' ' | awk '{print$1}'`
 if [ "$UPSEC" -lt "300" ]; then
     SET_FASTLOAD_UPSEC=1
@@ -163,9 +163,11 @@ fi
 
 case "$1" in
 -s|--start)
+    get_state
     start
     ;;
 -f|--flush|--stop)
+    get_state
     flush
     ;;
 -l|--list)
@@ -176,6 +178,7 @@ case "$1" in
 
     ;;
 -r|--restart)
+    get_state
     $0 --flush
     sleep 2
     $0 --start
@@ -194,6 +197,7 @@ case "$1" in
     fi
     ;;
 -e|--refresh)
+    get_state
     refresh
     ;;
 -o|--ovars)
diff --git a/files/conf.apf b/files/conf.apf
index ca9404a..84efcf8 100644
--- a/files/conf.apf
+++ b/files/conf.apf
@@ -435,27 +435,21 @@ EG_DROP_CMD="eggdrop psybnc bitchx BitchX init udp.pl"
 # This aggregate list combines Harvesters, Spammers and SMTP Dictionary attacks
 # from the PHP IP Data at: http://www.projecthoneypot.org/list_of_ips.php
 DLIST_PHP="0"
+DLIST_PHP_URL="http://cdn.rfxn.com/downloads/php_list"
 
-DLIST_PHP_URL="cdn.rfxn.com/downloads/php_list"
-DLIST_PHP_URL_PROT="http"
-
-# The Spamhaus Don't Route Or Peer List (DROP) is an advisory "drop all 
+# The Spamhaus Don't Route Or Peer List (DROP) is an advisory "drop all
 # traffic" list, consisting of stolen 'zombie' netblocks and netblocks
 # controlled entirely by professional spammers. For more information please
 # see http://www.spamhaus.org/drop/.
 DLIST_SPAMHAUS="0"
-
-DLIST_SPAMHAUS_URL="www.spamhaus.org/drop/drop.lasso"
-DLIST_SPAMHAUS_URL_PROT="http"
+DLIST_SPAMHAUS_URL="http://www.spamhaus.org/drop/drop.lasso"
 
 # DShield collects data about malicious activity from across the Internet.
 # This data is cataloged, summarized and can be used to discover trends in
 # activity, confirm widespread attacks, or assist in preparing better firewall
 # rules. This is a list of top networks that have exhibited suspicious activity.
 DLIST_DSHIELD="0"
-
-DLIST_DSHIELD_URL="feeds.dshield.org/top10-2.txt"
-DLIST_DSHIELD_URL_PROT="http"
+DLIST_DSHIELD_URL="http://feeds.dshield.org/top10-2.txt"
 
 # The reserved networks list is addresses which ARIN has marked as reserved
 # for future assignement and have no business as valid traffic on the internet.
@@ -464,9 +458,7 @@ DLIST_DSHIELD_URL_PROT="http"
 # on the internet from getting blocked; this option is only important when
 # BLK_RESNET is set to enabled.
 DLIST_RESERVED="1"
-
-DLIST_RESERVED_URL="cdn.rfxn.com/downloads/reserved.networks"
-DLIST_RESERVED_URL_PROT="http"
+DLIST_RESERVED_URL="http://cdn.rfxn.com/downloads/reserved.networks"
 
 # ECN is an extension which helps reduce congestion. Unfortunately some
 # clueless software/hardware vendors have setup their sites or implemented
@@ -476,9 +468,7 @@ DLIST_RESERVED_URL_PROT="http"
 # is accepted as intended. This option is dependent on setting SYSCTL_ECN="1"
 # otherwise it stays disabled.
 DLIST_ECNSHAME="0"
-
-DLIST_ECNSHAME_URL="cdn.rfxn.com/downloads/ecnshame.lst"
-DLIST_ECNSHAME_URL_PROT="http"
+DLIST_ECNSHAME_URL="http://cdn.rfxn.com/downloads/ecnshame.lst"
 
 ##
 # Global Trust
@@ -489,12 +479,8 @@ DLIST_ECNSHAME_URL_PROT="http"
 # a daily basis. The files can be maintained in a static fashion by
 # leaving USE_RGT=0, ideal for a host serving the files.
 USE_RGT="0"
-
-GA_URL="yourhost.com/glob_allow.rules"
-GA_URL_PROT="http"
-
-GD_URL="yourhost.com/glob_deny.rules"
-GD_URL_PROT="http"
+GA_URL="http://yourhost.com/glob_allow.rules"
+GD_URL="http://yourhost.com/glob_deny.rules"
 
 ##
 # [Logging and control settings]
diff --git a/files/internals/functions.apf b/files/internals/functions.apf
index 52dac64..0bd53cd 100644
--- a/files/internals/functions.apf
+++ b/files/internals/functions.apf
@@ -672,16 +672,15 @@ if [ -f "$WGET" ] && [ -f "$RESNET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP
     URL_FILE=`echo $DLIST_RESERVED_URL | tr '/' '\n' | grep "." | tail -n 1`
-    RD_CON="$DLIST_RESERVED_URL_PROT://$DLIST_RESERVED_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{resnet} downloading $DLIST_RESERVED_URL_PROT://$DLIST_RESERVED_URL"
-    $WGET -t 1 -T 4 $DLIST_RESERVED_URL_PROT://$DLIST_RESERVED_URL >> /dev/null 2>&1
+    eout "{resnet} downloading $DLIST_RESERVED_URL"
+    $WGET -t 1 -T 4 $DLIST_RESERVED_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
         eout "{resnet} parsing $URL_FILE into $RESNET"
         cat $URL_TMP/$URL_FILE > $RESNET
     else
-        eout "{resnet} download of $DLIST_RESERVED_URL_PROT://$DLIST_RESERVED_URL failed"
+        eout "{resnet} download of $DLIST_RESERVED_URL failed"
         if [ -f "$RESNET" ]; then
             cp $RESNET.bk $RESNET
             chmod 600 $RESNET $RESNET.bk
@@ -698,15 +697,14 @@ fi
 }
 
 dlist_php() {
-if [ ! "$DLIST_PHP_URL_PROT" == "" ] && [ ! "$DLIST_PHP_URL" == "" ] && [ "$DLIST_PHP" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$DLIST_PHP_URL" == "" ] && [ "$DLIST_PHP" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP /etc/apf/.apf-*
     URL_FILE=`echo $DLIST_PHP_URL | tr '/' '\n' | grep "." | tail -n 1`
-    URL_CON="$DLIST_PHP_URL_PROT://$DLIST_PHP_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{php} downloading $DLIST_PHP_URL_PROT://$DLIST_PHP_URL"
-    $WGET -t 1 -T 4 $DLIST_PHP_URL_PROT://$DLIST_PHP_URL >> /dev/null 2>&1
+    eout "{php} downloading $DLIST_PHP_URL"
+    $WGET -t 1 -T 4 $DLIST_PHP_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
         eout "{php} parsing $URL_FILE into $PHP_HOSTS"
         if [ -f "$PHP_HOSTS" ]; then
@@ -718,7 +716,7 @@ if [ ! "$DLIST_PHP_URL_PROT" == "" ] && [ ! "$DLIST_PHP_URL" == "" ] && [ "$DLIS
             fi
         done
     else
-        eout "{php} download of $DLIST_PHP_URL_PROT://$DLIST_PHP_URL failed"
+        eout "{php} download of $DLIST_PHP_URL failed"
     fi
     rm -rf $URL_TMP
     cd /etc/apf
@@ -747,15 +745,14 @@ fi
 }
 
 dlist_dshield() {
-if [ ! "$DLIST_DSHIELD_URL_PROT" == "" ] && [ ! "$DLIST_DSHIELD_URL" == "" ] && [ "$DLIST_DSHIELD" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$DLIST_DSHIELD_URL" == "" ] && [ "$DLIST_DSHIELD" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP /etc/apf/.apf-*
     URL_FILE=`echo $DLIST_DSHIELD_URL | tr '/' '\n' | grep "." | tail -n 1`
-    URL_CON="$DLIST_DSHIELD_URL_PROT://$DLIST_DSHIELD_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{dshield} downloading $DLIST_DSHIELD_URL_PROT://$DLIST_DSHIELD_URL"
-    $WGET -t 1 -T 4 $DLIST_DSHIELD_URL_PROT://$DLIST_DSHIELD_URL >> /dev/null 2>&1
+    eout "{dshield} downloading $DLIST_DSHIELD_URL"
+    $WGET -t 1 -T 4 $DLIST_DSHIELD_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
         eout "{dshield} parsing $URL_FILE into $DS_HOSTS"
         if [ -f "$DS_HOSTS" ]; then
@@ -767,7 +764,7 @@ if [ ! "$DLIST_DSHIELD_URL_PROT" == "" ] && [ ! "$DLIST_DSHIELD_URL" == "" ] &&
             fi
         done
     else
-        eout "{dshield} download of $DLIST_DSHIELD_URL_PROT://$DLIST_DSHIELD_URL failed"
+        eout "{dshield} download of $DLIST_DSHIELD_URL failed"
     fi
     rm -rf $URL_TMP
     cd /etc/apf
@@ -796,15 +793,14 @@ fi
 }
 
 dlist_spamhaus() {
-if [ ! "$DLIST_SPAMHAUS_URL_PROT" == "" ] && [ ! "$DLIST_SPAMHAUS_URL" == "" ] && [ "$DLIST_SPAMHAUS" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$DLIST_SPAMHAUS_URL" == "" ] && [ "$DLIST_SPAMHAUS" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP /etc/apf/.apf-*
     URL_FILE=`echo $DLIST_SPAMHAUS_URL | tr '/' '\n' | grep "." | tail -n 1`
-    URL_CON="$DLIST_SPAMHAUS_URL_PROT://$DLIST_SPAMHAUS_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{sdrop} downloading $DLIST_SPAMHAUS_URL_PROT://$DLIST_SPAMHAUS_URL"
-    $WGET -t 1 -T 4 $DLIST_SPAMHAUS_URL_PROT://$DLIST_SPAMHAUS_URL >> /dev/null 2>&1
+    eout "{sdrop} downloading $DLIST_SPAMHAUS_URL"
+    $WGET -t 1 -T 4 $DLIST_SPAMHAUS_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
         eout "{sdrop} parsing $URL_FILE into $DROP_HOSTS"
         if [ -f "$DROP_HOSTS" ]; then
@@ -816,7 +812,7 @@ if [ ! "$DLIST_SPAMHAUS_URL_PROT" == "" ] && [ ! "$DLIST_SPAMHAUS_URL" == "" ] &
             fi
         done
     else
-        eout "{sdrop} download of $DLIST_SPAMHAUS_URL_PROT://$DLIST_SPAMHAUS_URL failed"
+        eout "{sdrop} download of $DLIST_SPAMHAUS_URL failed"
     fi
     rm -rf $URL_TMP
     cd /etc/apf
@@ -846,15 +842,14 @@ fi
 }
 
 dlist_ecnshame() {
-if [ ! "$DLIST_ECNSHAME_URL_PROT" == "" ] && [ ! "$DLIST_ECNSHAME_URL" == "" ] && [ "$DLIST_ECNSHAME" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$DLIST_ECNSHAME_URL" == "" ] && [ "$DLIST_ECNSHAME" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP /etc/apf/.apf-*
     URL_FILE=`echo $DLIST_ECNSHAME_URL | tr '/' '\n' | grep "." | tail -n 1`
-    URL_CON="$DLIST_ECNSHAME_URL_PROT://$DLIST_ECNSHAME_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{ecnshame} downloading $DLIST_ECNSHAME_URL_PROT://$DLIST_ECNSHAME_URL"
-    $WGET -t 1 -T 4 $DLIST_ECNSHAME_URL_PROT://$DLIST_ECNSHAME_URL >> /dev/null 2>&1
+    eout "{ecnshame} downloading $DLIST_ECNSHAME_URL"
+    $WGET -t 1 -T 4 $DLIST_ECNSHAME_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
        eout "{ecnshame} parsing $URL_FILE into $ECNSHAME_HOSTS"
        if [ -f "$ECNSHAME_HOSTS" ]; then
@@ -866,7 +861,7 @@ if [ ! "$DLIST_ECNSHAME_URL_PROT" == "" ] && [ ! "$DLIST_ECNSHAME_URL" == "" ] &
             fi
         done
     else
-        eout "{ecnshame} download of $DLIST_ECNSHAME_URL_PROT://$DLIST_ECNSHAME_URL failed"
+        eout "{ecnshame} download of $DLIST_ECNSHAME_URL failed"
     fi
     rm -rf $URL_TMP
     cd /etc/apf
@@ -889,20 +884,19 @@ fi
 }
 
 glob_allow_download() {
-if [ ! "$GA_URL_PROT" == "" ] && [ ! "$GA_URL" == "" ] && [ "$USE_RGT" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$GA_URL" == "" ] && [ "$USE_RGT" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP
     URL_FILE=`echo $GA_URL | tr '/' '\n' | grep "." | tail -n 1`
-    GA_URL_CON="$GA_URL_PROT://$GA_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{trust} downloading $GA_URL_PROT://$GA_URL"
-    $WGET -t 1 -T 4 $GA_URL_PROT://$GA_URL >> /dev/null 2>&1
+    eout "{trust} downloading $GA_URL"
+    $WGET -t 1 -T 4 $GA_URL >> /dev/null 2>&1
     if [ -f "$URL_TMP/$URL_FILE" ]; then
         eout "{trust} parsing $URL_FILE into $GALLOW_HOSTS"
         cat $URL_TMP/$URL_FILE > $GALLOW_HOSTS
     else
-        eout "{trust} download of $GA_URL_PROT://$GA_URL failed"
+        eout "{trust} download of $GA_URL failed"
     fi
     rm -rf $URL_TMP
     cd /etc/apf
@@ -914,20 +908,19 @@ fi
 }
 
 glob_deny_download() {
-if [ ! "$GD_URL_PROT" == "" ] && [ ! "$GD_URL" == "" ] && [ "$USE_RGT" == "1" ] && [ -f "$WGET" ]; then
+if [ ! "$GD_URL" == "" ] && [ "$USE_RGT" == "1" ] && [ -f "$WGET" ]; then
     URL_TMP="/etc/apf/.apf-$$"
     rm -rf $URL_TMP
     URL_FILE=`echo $GD_URL | tr '/' '\n' | grep "." | tail -n 1`
-    GD_URL_CON="$GD_URL_PROT://$GD_URL"
     mkdir $URL_TMP
     cd $URL_TMP
-    eout "{trust} downloading $GD_URL_PROT://$GD_URL"
-    $WGET -t 1 -T 4 $GD_URL_PROT://$GD_URL >> /dev/null 2>&1
+    eout "{trust} downloading $GD_URL"
+    $WGET -t 1 -T 4 $GD_URL >> /dev/null 2>&1
    if [ -f "$URL_TMP/$URL_FILE" ]; then
        eout "{trust} parsing $URL_FILE into $GDENY_HOSTS"
        cat $URL_TMP/$URL_FILE > $GDENY_HOSTS
    else
-        eout "{trust} download of $GD_URL_PROT://$GD_URL failed"
+        eout "{trust} download of $GD_URL failed"
    fi
    rm -rf $URL_TMP
    cd /etc/apf
diff --git a/files/internals/internals.conf b/files/internals/internals.conf
index afb3e07..7afd0c6 100644
--- a/files/internals/internals.conf
+++ b/files/internals/internals.conf
@@ -40,7 +40,7 @@ NAME=`echo $APPN | tr '[:lower:]' '[:upper:]'`
 TIME=`date +"%D %H:%M:%S"`
 UTIME=`date +"%s"`
 KREL=`$UNAME -r | cut -d\. -f 1,2`
-LOCK_TIMEOUT="360"
+LOCK_TIMEOUT="300"
 LOCK="$INSPATH/lock.utime"
 ADR="$INSTALL_PATH/ad/ad.rules"