#!/bin/bash
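
# Stop at the first command that fails.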
set -e

# Setting env
SCAN_DATE=$(date "+%F-%H-%M")
export SHALLOT_DIR="/tmp/shallot-$SCAN_DATE"
mkdir -p "$SHALLOT_DIR"
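# All output from this run is collected under $SHALLOT_DIR.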

echo "Shallot script v0.1.1"

if [[ $# -eq 0 ]] ; then
    echo "[ERRO] No arguments were passed, exiting..."
    exit 1
fi

if [[ $# -ge 2 ]] ; then
    echo "[WARN] Too many arguments were passed, this script only uses the first one."
fi
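
# npx is needed below for the curl-headers-to-json step of the HTTP header scan.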
[ "$(type -P npx)" ] || { echo "[ERRO] npx is not in the PATH, install npm first!" ; exit 1 ; }

export ONIONSITE="$1"

echo "Checking if Tor Browser proxy is running..."

#NETSTAT_OUTPUT=`netstat -tlnp 2> /dev/null`
#IF_TOR_RUNNING=`echo $NETSTAT_OUTPUT | grep -Ezqv "/tor" && echo 0 || echo 1`

IF_TOR_RUNNING=$(ps -eaf | grep -i tor | sed '/^$/d' | wc -l)

if [[ "$IF_TOR_RUNNING" -gt 1 ]] ; then
    echo "[INFO] Tor is running!"
else
    echo "[ERRO] Tor is not running, start Tor Browser and connect to Tor, then restart this script"
    exit 1
fi
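
# Alternative check (sketch): grepping the process list also matches the grep
# itself and any process whose name contains "tor", so probing the Tor Browser
# SOCKS port directly is more precise (assumes the default 127.0.0.1:9150).
#if (exec 3<>/dev/tcp/127.0.0.1/9150) 2>/dev/null ; then
#    echo "[INFO] Tor SOCKS proxy is listening on 127.0.0.1:9150"
#else
#    echo "[ERRO] Nothing is listening on 127.0.0.1:9150"
#fi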

IS_ADDRESS_ONION=$(echo "$1" | grep -Ei "\.onion$" | wc -c)

if [[ "$IS_ADDRESS_ONION" -gt 0 ]] ; then
    echo "Checking Onion Service, address: $1"
else
    echo "[ERRO] Looks like $1 is not an onion site, exiting..."
    exit 1
fi
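
# Stricter variant (sketch): current v3 onion hostnames are 56 base32
# characters followed by ".onion", so the address could be validated with a
# regex instead of only checking the suffix.
#if [[ "$1" =~ [a-z2-7]{56}\.onion$ ]] ; then
#    echo "Address looks like a v3 onion hostname"
#fi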

echo ""

# OnionScan

echo "[INFO] Running OnionScan against the address, this will take a while..."
export ONIONSCAN_REPORT=$(onionscan --jsonReport --torProxyAddress "127.0.0.1:9150" "$1" 2>"$SHALLOT_DIR/onionscan_error.log" | jq .)
echo "$ONIONSCAN_REPORT" > "$SHALLOT_DIR/onionscan_result.txt"

if [ -n "$ONIONSCAN_REPORT" ] ; then
    echo "[INFO] OnionScan done! Saved in $SHALLOT_DIR/onionscan_result.txt"
else
    echo "[ERRO] Error occurred, exiting, check $SHALLOT_DIR/onionscan_error.log for details."
    exit 1
fi
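
# The saved report is JSON, so individual findings can be pulled out with jq,
# e.g. listing its top-level keys (which fields exist depends on the OnionScan
# version):
#jq 'keys' "$SHALLOT_DIR/onionscan_result.txt"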

# HTTP Headers

echo "Scanning HTTP headers, wait..."

#export HTTP_HEADERS=$(proxychains -q -f /etc/proxychains4.conf /usr/bin/curl -I -s $1 | tail -n +3 | sed 's/\r//g' | head -n -1 | jq -R 'split(":")|{(.[0]) : .[1]}' | sed 's/\\"//g' 2>$SHALLOT_DIR/http_headers_error.log)

export HTTP_HEADERS=$({ proxychains -q -f /etc/proxychains4.conf /usr/bin/curl -LIs -D - "$1" -o /dev/null | npx curl-headers-to-json | sed 's/\\"//g' | jq . ; } 2>"$SHALLOT_DIR/http_headers_error.log")
echo "$HTTP_HEADERS" > "$SHALLOT_DIR/http_headers.txt"

if [ -n "$HTTP_HEADERS" ] ; then
    echo "[INFO] HTTP headers done! Saved in $SHALLOT_DIR/http_headers.txt"
else
    echo "[ERRO] Error occurred, check $SHALLOT_DIR/http_headers_error.log"
fi
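
# Example follow-up (sketch): check the saved headers for common security
# headers; exact key names and casing depend on what curl-headers-to-json
# emits.
#grep -Ei '"(strict-transport-security|content-security-policy|x-frame-options)"' "$SHALLOT_DIR/http_headers.txt"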

# Report analysis
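
# main.py presumably consumes the results gathered above through the exported
# variables (SHALLOT_DIR, ONIONSITE, ONIONSCAN_REPORT, HTTP_HEADERS) and/or the
# files written to $SHALLOT_DIR.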
python3 main.py

echo "Work done, exiting."

exit 0