From 7e49043f895ad4a82210fde174ae6d7f956d17bf Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 16 Feb 2018 14:11:45 +0530
Subject: [PATCH 01/13] Custom error ("404 not found") page (#93)
error_page_implementation
---
clone.py | 10 ++++++----
snare.py | 16 ++++++++++++----
2 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/clone.py b/clone.py
index f43dde4e..079bf04f 100644
--- a/clone.py
+++ b/clone.py
@@ -32,7 +32,7 @@
class Cloner(object):
def __init__(self, root, max_depth):
self.visited_urls = []
- self.root = self.add_scheme(root)
+ self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
self.moved_root = None
if len(self.root.host) < 4:
@@ -51,7 +51,8 @@ def add_scheme(url):
new_url = yarl.URL(url)
else:
new_url = yarl.URL('http://' + url)
- return new_url
+ err_url = new_url.with_path('/status_404')  # build on the parsed URL so an existing scheme is not doubled
+ return new_url, err_url
async def process_link(self, url, level, check_host=False):
try:
@@ -145,10 +146,10 @@ async def get_body(self, session):
content_type = None
try:
with aiohttp.Timeout(10.0):
- response = await session.get(current_url)
+ response = await session.get(current_url, headers={'Accept': 'text/html'})
content_type = response.content_type
data = await response.read()
-
+
except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
print(client_error)
else:
@@ -187,6 +188,7 @@ async def run(self):
session = aiohttp.ClientSession()
try:
await self.new_urls.put((self.root, 0))
+ await self.new_urls.put((self.error_page,0))
await self.get_body(session)
except KeyboardInterrupt:
raise
diff --git a/snare.py b/snare.py
index 52fba0f0..103e26f4 100644
--- a/snare.py
+++ b/snare.py
@@ -220,9 +220,9 @@ async def parse_tanner_response(self, requested_name, detection):
content = None
status_code = 200
headers = {}
- p = re.compile('/+')
- requested_name = p.sub('/', requested_name)
-
+ p = re.compile('/+') # Creating a regex object for the pattern of multiple contiguous forward slashes
+ requested_name = p.sub('/', requested_name) # Substituting all occurrences of the pattern with single forward slash
+
if detection['type'] == 1:
query_start = requested_name.find('?')
if query_start != -1:
@@ -232,12 +232,20 @@ async def parse_tanner_response(self, requested_name, detection):
requested_name = self.run_args.index_page
try:
if requested_name[-1] == '/':
- requested_name = requested_name[:-1]
+ requested_name = requested_name[:-1]
requested_name = unquote(requested_name)
file_name = self.meta[requested_name]['hash']
content_type = self.meta[requested_name]['content_type']
except KeyError:
status_code = 404
+ requested_name = '/status_404'
+ file_name = self.meta[requested_name]['hash']
+ content_type = 'text/html'
+ path = os.path.join(self.dir, file_name)
+ with open(path, 'rb') as fh:
+ content = fh.read()
+ content = await self.handle_html_content(content)
+
else:
path = os.path.join(self.dir, file_name)
if os.path.isfile(path):
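
Taken together, the two halves of this patch give every cloned site its own error page: the cloner queues /status_404 so it ends up in meta.json, and the server falls back to that entry whenever a requested path has no hash. A minimal sketch of the lookup, assuming a meta.json of the shape the cloner writes ({path: {hash, content_type}}); the page directory and the resolve() helper are hypothetical:

    import json
    import os

    PAGE_DIR = '/opt/snare/pages/example.com'  # hypothetical cloned-page dir

    def resolve(requested_name, meta):
        """Map a request path to (file_name, content_type, status)."""
        try:
            entry = meta[requested_name]
            return entry['hash'], entry['content_type'], 200
        except KeyError:
            # Fall back to the cloned error page, as parse_tanner_response does
            return meta['/status_404']['hash'], 'text/html', 404

    with open(os.path.join(PAGE_DIR, 'meta.json')) as mj:
        meta = json.load(mj)
    print(resolve('/no/such/page', meta))

Note the fallback itself assumes '/status_404' made it into meta.json; for a page cloned before this patch, the except branch would raise KeyError in turn.
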
From b27daa211c540905cb1913756d331fb77d98f8c5 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Tue, 13 Mar 2018 03:32:06 +0530
Subject: [PATCH 02/13] Logging for snare and cloner (#109)
* Creating logger
* Remove conf.py
* Adding clone error logger
* Changes
* Removing level as we have both .log and .err
* Adding quit info
* Removing extra slash
* Requested changes
* minor change
---
clone.py | 26 ++++++++++++++++++++++----
logger.py | 49 +++++++++++++++++++++++++++++++++++++++++++++++++
snare.py | 26 +++++++++++++++++++-------
3 files changed, 90 insertions(+), 11 deletions(-)
create mode 100644 logger.py
diff --git a/clone.py b/clone.py
index 079bf04f..121f6f43 100644
--- a/clone.py
+++ b/clone.py
@@ -27,6 +27,8 @@
import cssutils
import yarl
from bs4 import BeautifulSoup
+import logger
+import logging
class Cloner(object):
@@ -41,9 +43,10 @@ def __init__(self, root, max_depth):
if not os.path.exists(self.target_path):
os.mkdir(self.target_path)
-
+
self.new_urls = Queue()
self.meta = {}
+ self.logger = logging.getLogger(__name__)
@staticmethod
def add_scheme(url):
@@ -82,7 +85,7 @@ async def process_link(self, url, level, check_host=False):
try:
res = url.relative().human_repr()
except ValueError:
- print(url)
+ self.logger.error(url)
return res
async def replace_links(self, data, level):
@@ -151,7 +154,7 @@ async def get_body(self, session):
data = await response.read()
except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
- print(client_error)
+ self.logger.error(client_error)
else:
await response.release()
if data is not None:
@@ -181,7 +184,7 @@ async def get_root_host(self):
self.moved_root = resp._url_obj
resp.close()
except aiohttp.errors.ClientError as err:
- print("Can\'t connect to target host.")
+ self.logger.error("Can\'t connect to target host.")
exit(-1)
async def run(self):
@@ -210,7 +213,14 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument("--target", help="domain of the site to be cloned", required=True)
parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
+ parser.add_argument("--log_path", help="path to the error log file")
args = parser.parse_args()
+ if args.log_path:
+ log_err = args.log_path + "clone.err"
+ else:
+ log_err = "/opt/snare/clone.err"
+ logger.Logger.create_clone_logger(log_err, __package__)
+ print("Error logs will be stored in {}\n".format(log_err))
try:
cloner = Cloner(args.target, int(args.max_depth))
loop.run_until_complete(cloner.get_root_host())
@@ -220,4 +230,12 @@ def main():
if __name__ == '__main__':
+ print("""
+ ______ __ ______ _ ____________
+ / ____// / / __ // | / / ____/ __ \\
+ / / / / / / / // |/ / __/ / /_/ /
+ / /___ / /____ / /_/ // /| / /___/ _, _/
+/_____//______//_____//_/ |_/_____/_/ |_|
+
+ """)
main()
diff --git a/logger.py b/logger.py
new file mode 100644
index 00000000..e0b146e1
--- /dev/null
+++ b/logger.py
@@ -0,0 +1,49 @@
+import logging
+import logging.handlers
+
+class LevelFilter(logging.Filter):
+ """Filters (lets through) all messages with level < LEVEL"""
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+
+class Logger:
+
+ @staticmethod
+ def create_logger(debug_filename, err_filename, logger_name):
+ logger = logging.getLogger(logger_name)
+ logger.setLevel(logging.DEBUG)
+ logger.propagate = False
+ formatter = logging.Formatter(
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+
+ # ERROR log to 'snare.err'
+ error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
+ error_log_handler.setLevel(logging.ERROR)
+ error_log_handler.setFormatter(formatter)
+ logger.addHandler(error_log_handler)
+
+ # DEBUG log to 'snare.log'
+ debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding='utf-8')
+ debug_log_handler.setLevel(logging.DEBUG)
+ debug_log_handler.setFormatter(formatter)
+ max_level_filter = LevelFilter(logging.ERROR)
+ debug_log_handler.addFilter(max_level_filter)
+ logger.addHandler(debug_log_handler)
+
+ return logger
+
+ @staticmethod
+ def create_clone_logger(err_filename, logger_name):
+ logger = logging.getLogger(logger_name)
+ formatter = logging.Formatter(
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ # ERROR log to 'clone.err'
+ error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
+ error_log_handler.setLevel(logging.ERROR)
+ error_log_handler.setFormatter(formatter)
+ logger.addHandler(error_log_handler)
+
\ No newline at end of file
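
The "<" in LevelFilter is what keeps the two files disjoint: a handler's level is an inclusive lower bound, so the ERROR handler takes ERROR and above, while the filter caps the debug handler strictly below ERROR. A quick demonstration with stream handlers standing in for the rotating file handlers, so it runs without touching disk:

    import logging
    import sys

    class LevelFilter(logging.Filter):
        """Let through only records strictly below `level` (as above)."""
        def __init__(self, level):
            self.level = level

        def filter(self, record):
            return record.levelno < self.level

    log = logging.getLogger('demo')
    log.setLevel(logging.DEBUG)

    err_handler = logging.StreamHandler(sys.stderr)  # stands in for snare.err
    err_handler.setLevel(logging.ERROR)
    log.addHandler(err_handler)

    dbg_handler = logging.StreamHandler(sys.stdout)  # stands in for snare.log
    dbg_handler.setLevel(logging.DEBUG)
    dbg_handler.addFilter(LevelFilter(logging.ERROR))  # DEBUG..WARNING only
    log.addHandler(dbg_handler)

    log.warning('stdout only')  # below ERROR: filter passes, err handler drops
    log.error('stderr only')    # ERROR: filter blocks, err handler accepts
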
diff --git a/snare.py b/snare.py
index 103e26f4..5a599d7d 100644
--- a/snare.py
+++ b/snare.py
@@ -33,6 +33,8 @@
import pip
from aiohttp import MultiDict
import re
+import logging
+import logger
try:
from aiohttp.web import StaticResource as StaticRoute
@@ -53,6 +55,8 @@ def __init__(self, meta, run_args, debug=False, keep_alive=75, **kwargs):
self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
self.meta = meta
+
+ self.logger = logging.getLogger(__name__)
self.sroute = StaticRoute(
name=None, prefix='/',
@@ -71,11 +75,11 @@ async def get_dorks(self):
try:
dorks = await r.json()
except json.decoder.JSONDecodeError as e:
- print(e)
+ self.logger.error('Error getting dorks: %s', e)
finally:
await r.release()
except asyncio.TimeoutError:
- print('Dorks timeout')
+ self.logger.info('Dorks timeout')
return dorks['response']['dorks'] if dorks else []
async def submit_slurp(self, data):
@@ -90,7 +94,7 @@ async def submit_slurp(self, data):
assert r.status == 200
r.close()
except Exception as e:
- print(e)
+ self.logger.error('Error submitting slurp: %s', e)
def create_data(self, request, response_status):
data = dict(
@@ -127,7 +131,7 @@ async def submit_data(self, data):
try:
event_result = await r.json()
except json.decoder.JSONDecodeError as e:
- print(e, data)
+ self.logger.error('Error submitting data: {} {}'.format(e, data))
finally:
await r.release()
except Exception as e:
@@ -167,14 +171,14 @@ async def handle_html_content(self, content):
return content
async def handle_request(self, request, payload):
- print('Request path: {0}'.format(request.path))
+ self.logger.info('Request path: {0}'.format(request.path))
data = self.create_data(request, 200)
if request.method == 'POST':
post_data = await payload.read()
post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
- print('POST data:')
+ self.logger.info('POST data:')
for key, val in post_data.items():
- print('\t- {0}: {1}'.format(key, val))
+ self.logger.info('\t- {0}: {1}'.format(key, val))
data['post_data'] = dict(post_data)
# Submit the event to the TANNER service
@@ -456,12 +460,17 @@ async def check_tanner():
parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
parser.add_argument("--no-dorks", help="disable the use of dorks", action='store_true')
+ parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
args = parser.parse_args()
base_path = '/opt/snare/'
base_page_path = '/opt/snare/pages/'
config = configparser.ConfigParser()
config.read(os.path.join(base_path, args.config))
+
+ log_debug = args.log_dir + "snare.log"
+ log_err = args.log_dir + "snare.err"
+ logger.Logger.create_logger(log_debug, log_err, __package__)
if args.list_pages:
print('Available pages:\n')
@@ -507,6 +516,9 @@ async def check_tanner():
drop_privileges()
print('serving on {0} with uuid {1}'.format(srv.sockets[0].getsockname()[:2], snare_uuid.decode('utf-8')))
+ print("Debug logs will be stored in", log_debug)
+ print("Error logs will be stored in", log_err)
+ print("(Press CTRL+C to quit)")
try:
loop.run_forever()
except (KeyboardInterrupt, TypeError) as e:
From 83c204d6cf86e551934b3c8c469be8385f6a24c1 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 13 Apr 2018 00:27:20 +0530
Subject: [PATCH 03/13] Add new parameter css-validate to cloner (#120)
* Adding Cloner parameter for css validation
* minor change
* Suggested changes
---
clone.py | 24 ++++++++++++++++--------
1 file changed, 16 insertions(+), 8 deletions(-)
diff --git a/clone.py b/clone.py
index 121f6f43..c7b1e0fc 100644
--- a/clone.py
+++ b/clone.py
@@ -32,7 +32,7 @@
class Cloner(object):
- def __init__(self, root, max_depth):
+ def __init__(self, root, max_depth, css_validate):
self.visited_urls = []
self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
@@ -42,8 +42,8 @@ def __init__(self, root, max_depth):
self.target_path = '/opt/snare/pages/{}'.format(self.root.host)
if not os.path.exists(self.target_path):
- os.mkdir(self.target_path)
-
+ os.mkdir(self.target_path)
+ self.css_validate = css_validate
self.new_urls = Queue()
self.meta = {}
self.logger = logging.getLogger(__name__)
@@ -165,8 +165,8 @@ async def get_body(self, session):
data = str(soup).encode()
with open(os.path.join(self.target_path, hash_name), 'wb') as index_fh:
index_fh.write(data)
- if content_type == 'text/css':
- css = cssutils.parseString(data)
+ if content_type == 'text/css':
+ css = cssutils.parseString(data, validate=self.css_validate)
for carved_url in cssutils.getUrls(css):
if carved_url.startswith('data'):
continue
@@ -199,7 +199,14 @@ async def run(self):
with open(os.path.join(self.target_path, 'meta.json'), 'w') as mj:
json.dump(self.meta, mj)
await session.close()
-
+
+def str_to_bool(v):
+ if v.lower() == 'true':
+ return True
+ elif v.lower() == 'false':
+ return False
+ else:
+ raise argparse.ArgumentTypeError('Boolean value expected')
def main():
if os.getuid() != 0:
@@ -214,15 +221,16 @@ def main():
parser.add_argument("--target", help="domain of the site to be cloned", required=True)
parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
parser.add_argument("--log_path", help="path to the error log file")
+ parser.add_argument("--css-validate", help="set whether css validation is required", type=str_to_bool, default=None)
args = parser.parse_args()
if args.log_path:
log_err = args.log_path + "clone.err"
else:
- log_err = "/opt/snare/clone.err"
+ log_err = "/opt/snare/clone.err"
logger.Logger.create_clone_logger(log_err, __package__)
print("Error logs will be stored in {}\n".format(log_err))
try:
- cloner = Cloner(args.target, int(args.max_depth))
+ cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
loop.run_until_complete(cloner.get_root_host())
loop.run_until_complete(cloner.run())
except KeyboardInterrupt:
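
Using str_to_bool as the argparse type gives --css-validate strict true/false parsing and a clean usage error for anything else. A standalone sketch of how the flag behaves:

    import argparse

    def str_to_bool(v):
        if v.lower() == 'true':
            return True
        elif v.lower() == 'false':
            return False
        else:
            raise argparse.ArgumentTypeError('Boolean value expected')

    parser = argparse.ArgumentParser()
    parser.add_argument('--css-validate', type=str_to_bool, default=None)

    print(parser.parse_args(['--css-validate', 'True']).css_validate)   # True
    print(parser.parse_args(['--css-validate', 'false']).css_validate)  # False
    parser.parse_args(['--css-validate', 'yes'])  # exits: Boolean value expected

Since argparse never runs the default through type, omitting the flag leaves css_validate as None, and cssutils falls back to its own default validation setting.
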
From f912a9c2dc1514e71aa863b72eaee2d4ed5c328a Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 1 Jun 2018 16:18:25 +0530
Subject: [PATCH 04/13] Re-write cloner and move to setup structure (#141)
* Re-writing snare with latest web server
* Pep8 formatting
* Breaking server.py
* Break snare.py
* Remove duplicates
* Add port to server
* Improve project structure
* Suggested changes
* pep8 formatting
* Suggested changes
* rewrite and add clone to setup structure
* Formatting
* Pep8 formatting
---
{tests => bin}/__init__.py | 0
bin/clone | 65 +++
bin/snare | 209 +++++++
converter.py | 31 -
setup.py | 13 +
snare.py | 531 ------------------
snare/__init__.py | 0
clone.py => snare/cloner.py | 102 +---
snare/html_handler.py | 63 +++
snare/middlewares.py | 38 ++
snare/server.py | 93 +++
snare/tanner_handler.py | 126 +++++
snare/tests/__init__.py | 0
{tests => snare/tests}/test_add_meta_tag.py | 0
{tests => snare/tests}/test_converter.py | 0
{tests => snare/tests}/test_create_data.py | 0
{tests => snare/tests}/test_get_dorks.py | 0
{tests => snare/tests}/test_handle_error.py | 0
.../tests}/test_handle_html_content.py | 0
{tests => snare/tests}/test_handle_request.py | 0
.../tests}/test_parse_tanner_response.py | 0
{tests => snare/tests}/test_submit_data.py | 0
.../tests}/test_versions_manager.py | 0
snare/utils/__init__.py | 0
{utils => snare/utils}/asyncmock.py | 0
logger.py => snare/utils/logger.py | 22 +-
{utils => snare/utils}/page_path_generator.py | 0
snare/utils/snare_helpers.py | 105 ++++
versions_manager.py | 17 -
29 files changed, 742 insertions(+), 673 deletions(-)
rename {tests => bin}/__init__.py (100%)
create mode 100644 bin/clone
create mode 100644 bin/snare
delete mode 100644 converter.py
create mode 100644 setup.py
delete mode 100644 snare.py
create mode 100644 snare/__init__.py
rename clone.py => snare/cloner.py (66%)
create mode 100644 snare/html_handler.py
create mode 100644 snare/middlewares.py
create mode 100644 snare/server.py
create mode 100644 snare/tanner_handler.py
create mode 100644 snare/tests/__init__.py
rename {tests => snare/tests}/test_add_meta_tag.py (100%)
rename {tests => snare/tests}/test_converter.py (100%)
rename {tests => snare/tests}/test_create_data.py (100%)
rename {tests => snare/tests}/test_get_dorks.py (100%)
rename {tests => snare/tests}/test_handle_error.py (100%)
rename {tests => snare/tests}/test_handle_html_content.py (100%)
rename {tests => snare/tests}/test_handle_request.py (100%)
rename {tests => snare/tests}/test_parse_tanner_response.py (100%)
rename {tests => snare/tests}/test_submit_data.py (100%)
rename {tests => snare/tests}/test_versions_manager.py (100%)
create mode 100644 snare/utils/__init__.py
rename {utils => snare/utils}/asyncmock.py (100%)
rename logger.py => snare/utils/logger.py (81%)
rename {utils => snare/utils}/page_path_generator.py (100%)
create mode 100644 snare/utils/snare_helpers.py
delete mode 100644 versions_manager.py
diff --git a/tests/__init__.py b/bin/__init__.py
similarity index 100%
rename from tests/__init__.py
rename to bin/__init__.py
diff --git a/bin/clone b/bin/clone
new file mode 100644
index 00000000..a890bbb3
--- /dev/null
+++ b/bin/clone
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+
+"""
+Copyright (C) 2015-2016 MushMush Foundation
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+"""
+
+import argparse
+import asyncio
+import os
+import sys
+from snare.utils import logger
+from snare.cloner import Cloner
+from snare.utils.snare_helpers import str_to_bool
+
+def main():
+ if os.getuid() != 0:
+ print('Clone has to be run as root!')
+ sys.exit(1)
+ if not os.path.exists('/opt/snare'):
+ os.mkdir('/opt/snare')
+ if not os.path.exists('/opt/snare/pages'):
+ os.mkdir('/opt/snare/pages')
+ loop = asyncio.get_event_loop()
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--target", help="domain of the site to be cloned", required=True)
+ parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
+ parser.add_argument("--log_path", help="path to the error log file")
+ parser.add_argument(
+ "--css-validate", help="set whether css validation is required", type=str_to_bool, default=None
+ )
+ args = parser.parse_args()
+ if args.log_path:
+ log_err = args.log_path + "clone.err"
+ else:
+ log_err = "/opt/snare/clone.err"
+ logger.Logger.create_clone_logger(log_err, __package__)
+ print("Error logs will be stored in {}\n".format(log_err))
+ try:
+ cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
+ loop.run_until_complete(cloner.get_root_host())
+ loop.run_until_complete(cloner.run())
+ except KeyboardInterrupt:
+ pass
+
+
+if __name__ == '__main__':
+ print("""
+ ______ __ ______ _ ____________
+ / ____// / / __ // | / / ____/ __ \\
+ / / / / / / / // |/ / __/ / /_/ /
+ / /___ / /____ / /_/ // /| / /___/ _, _/
+/_____//______//_____//_/ |_/_____/_/ |_|
+
+ """)
+ main()
diff --git a/bin/snare b/bin/snare
new file mode 100644
index 00000000..058acbe7
--- /dev/null
+++ b/bin/snare
@@ -0,0 +1,209 @@
+#!/usr/bin/python3
+
+"""
+Copyright (C) 2015-2016 MushMush Foundation
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+"""
+import argparse
+import asyncio
+import pwd
+import grp
+import configparser
+import json
+import multiprocessing
+import os
+import sys
+import time
+import uuid
+from concurrent.futures import ProcessPoolExecutor
+import aiohttp
+import git
+import pip
+import netifaces as ni
+from snare.server import HttpRequestHandler
+from snare.utils.logger import Logger
+from snare.utils import snare_helpers
+from snare.utils.snare_helpers import str_to_bool
+
+
+def create_initial_config():
+ cfg = configparser.ConfigParser()
+ cfg['WEB-TOOLS'] = dict(google='', bing='')
+ with open('/opt/snare/snare.cfg', 'w') as configfile:
+ cfg.write(configfile)
+
+
+def snare_setup():
+ if os.getuid() != 0:
+ print('Snare has to be started as root!')
+ sys.exit(1)
+ # Create folders
+ if not os.path.exists('/opt/snare'):
+ os.mkdir('/opt/snare')
+ if not os.path.exists('/opt/snare/pages'):
+ os.mkdir('/opt/snare/pages')
+ # Write pid to pid file
+ with open('/opt/snare/snare.pid', 'wb') as pid_fh:
+ pid_fh.write(str(os.getpid()).encode('utf-8'))
+ # Config file
+ if not os.path.exists('/opt/snare/snare.cfg'):
+ create_initial_config()
+ # Read or create the sensor id
+ uuid_file_path = '/opt/snare/snare.uuid'
+ if os.path.exists(uuid_file_path):
+ with open(uuid_file_path, 'rb') as uuid_fh:
+ snare_uuid = uuid_fh.read()
+ return snare_uuid
+ else:
+ with open(uuid_file_path, 'wb') as uuid_fh:
+ snare_uuid = str(uuid.uuid4()).encode('utf-8')
+ uuid_fh.write(snare_uuid)
+ return snare_uuid
+
+
+def drop_privileges():
+ uid_name = 'nobody'
+ wanted_user = pwd.getpwnam(uid_name)
+ gid_name = grp.getgrgid(wanted_user.pw_gid).gr_name
+ wanted_group = grp.getgrnam(gid_name)
+ os.setgid(wanted_group.gr_gid)
+ os.setuid(wanted_user.pw_uid)
+ new_user = pwd.getpwuid(os.getuid())
+ new_group = grp.getgrgid(os.getgid())
+ print('privileges dropped, running as "{}:{}"'.format(new_user.pw_name, new_group.gr_name))
+
+
+def compare_version_info(timeout):
+ while True:
+ repo = git.Repo(os.getcwd())
+ try:
+ rem = repo.remote()
+ res = rem.fetch()
+ diff_list = res[0].commit.diff(repo.heads.master)
+ except TimeoutError:
+ print('timeout fetching the repository version')
+ else:
+ if diff_list:
+ print('you are running an outdated version, SNARE will be updated and restarted')
+ repo.git.reset('--hard')
+ repo.heads.master.checkout()
+ repo.git.clean('-xdf')
+ repo.remotes.origin.pull()
+ pip.main(['install', '-r', 'requirements.txt'])
+ os.execv(sys.executable, [sys.executable, __file__] + sys.argv[1:])
+ return
+ else:
+ print('you are running the latest version')
+ time.sleep(timeout)
+
+
+async def check_tanner():
+ vm = snare_helpers.VersionManager()
+ async with aiohttp.ClientSession() as client:
+ req_url = 'http://{}:8090/version'.format(args.tanner)
+ try:
+ resp = await client.get(req_url)
+ result = await resp.json()
+ version = result["version"]
+ vm.check_compatibility(version)
+ except aiohttp.ClientOSError:
+ print("Can't connect to tanner host {}".format(req_url))
+ exit(1)
+ else:
+ await resp.release()
+
+if __name__ == '__main__':
+ print(r"""
+ _____ _ _____ ____ ______
+ / ___// | / / | / __ \/ ____/
+ \__ \/ |/ / /| | / /_/ / __/
+ ___/ / /| / ___ |/ _, _/ /___
+/____/_/ |_/_/ |_/_/ |_/_____/
+
+ """)
+ parser = argparse.ArgumentParser()
+ page_group = parser.add_mutually_exclusive_group(required=True)
+ page_group.add_argument("--page-dir", help="name of the folder to be served")
+ page_group.add_argument("--list-pages", help="list available pages", action='store_true')
+ parser.add_argument("--index-page", help="file name of the index page", default='index.html')
+ parser.add_argument("--port", help="port to listen on", default='8080')
+ parser.add_argument("--interface", help="interface to bind to")
+ parser.add_argument("--host-ip", help="host ip to bind to", default='localhost')
+ parser.add_argument("--debug", help="run web server in debug mode", default=False)
+ parser.add_argument("--tanner", help="ip of the tanner service", default='tanner.mushmush.org')
+ parser.add_argument("--skip-check-version", help="skip check for update", action='store_true')
+ parser.add_argument("--slurp-enabled", help="enable nsq logging", action='store_true')
+ parser.add_argument("--slurp-host", help="nsq logging host", default='slurp.mushmush.org')
+ parser.add_argument("--slurp-auth", help="nsq logging auth", default='slurp')
+ parser.add_argument("--config", help="snare config file", default='snare.cfg')
+ parser.add_argument("--auto-update", help="auto update SNARE if new version available ", default=True)
+ parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
+ parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
+ parser.add_argument("--no-dorks", help="disable the use of dorks", type=str_to_bool, default=True)
+ parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
+ args = parser.parse_args()
+ base_path = '/opt/snare/'
+ base_page_path = '/opt/snare/pages/'
+ snare_uuid = snare_setup()
+ config = configparser.ConfigParser()
+ config.read(os.path.join(base_path, args.config))
+ log_debug = args.log_dir + "snare.log"
+ log_err = args.log_dir + "snare.err"
+ Logger.create_logger(log_debug, log_err, __package__)
+ if args.list_pages:
+ print('Available pages:\n')
+ for page in os.listdir(base_page_path):
+ print('\t- {}'.format(page))
+ print('\nuse with --page-dir {page_name}\n\n')
+ exit()
+ full_page_path = os.path.join(base_page_path, args.page_dir)
+ if not os.path.exists(full_page_path):
+ print("--page-dir: {0} does not exist".format(args.page_dir))
+ exit()
+ args.index_page = os.path.join("/", args.index_page)
+
+ if not os.path.exists(os.path.join(full_page_path, 'meta.json')):
+ conv = snare_helpers.Converter()
+ conv.convert(full_page_path)
+        print("Pages were converted. Try cloning again for a better result.")
+
+ with open(os.path.join(full_page_path, 'meta.json')) as meta:
+ meta_info = json.load(meta)
+ if not os.path.exists(os.path.join(base_page_path, args.page_dir,
+ os.path.join(meta_info[args.index_page]['hash']))):
+ print('can\'t create meta tag')
+ else:
+ snare_helpers.add_meta_tag(args.page_dir, meta_info[args.index_page]['hash'], config)
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(check_tanner())
+
+ pool = ProcessPoolExecutor(max_workers=multiprocessing.cpu_count())
+ compare_version_fut = None
+ if args.auto_update is True:
+ timeout = snare_helpers.parse_timeout(args.update_timeout)
+ compare_version_fut = loop.run_in_executor(pool, compare_version_info, timeout)
+
+ if args.host_ip == 'localhost' and args.interface:
+ args.host_ip = ni.ifaddresses(args.interface)[2][0]['addr']
+
+ app = HttpRequestHandler(meta_info, args, snare_uuid, debug=args.debug, keep_alive=75)
+ drop_privileges()
+ print('serving with uuid {0}'.format(snare_uuid.decode('utf-8')))
+ print("Debug logs will be stored in", log_debug)
+ print("Error logs will be stored in", log_err)
+ try:
+ app.start()
+ except (KeyboardInterrupt, TypeError) as e:
+ print(e)
+ finally:
+ if compare_version_fut:
+ compare_version_fut.cancel()
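
snare_setup() uses a small read-or-create pattern for the persistent sensor id: the uuid is minted once on first run and reread from disk on every restart. The same pattern isolated, with a hypothetical path:

    import os
    import uuid

    def read_or_create_uuid(path='/tmp/snare.uuid'):  # hypothetical path
        if os.path.exists(path):
            with open(path, 'rb') as fh:
                return fh.read()
        sensor_id = str(uuid.uuid4()).encode('utf-8')
        with open(path, 'wb') as fh:
            fh.write(sensor_id)
        return sensor_id

    print(read_or_create_uuid())  # same bytes on every subsequent call
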
diff --git a/converter.py b/converter.py
deleted file mode 100644
index 19525549..00000000
--- a/converter.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import hashlib
-from os import walk
-import mimetypes
-import json
-import shutil
-
-
-class Converter:
- def __init__(self):
- self.meta = {}
-
- def convert(self, path):
- files_to_convert = []
-
- for (dirpath, dirnames, filenames) in walk(path):
- for fn in filenames:
- files_to_convert.append(os.path.join(dirpath, fn))
-
- for fn in files_to_convert:
- path_len = len(path)
- file_name = fn[path_len:]
- m = hashlib.md5()
- m.update(fn.encode('utf-8'))
- hash_name = m.hexdigest()
- self.meta[file_name] = {'hash': hash_name, 'content_type': mimetypes.guess_type(file_name)[0]}
- shutil.copyfile(fn, os.path.join(path, hash_name))
- os.remove(fn)
-
- with open(os.path.join(path, 'meta.json'), 'w') as mj:
- json.dump(self.meta, mj)
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..4f7b0f02
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+from setuptools import find_packages
+from distutils.core import setup
+
+setup(name='Snare',
+ version='0.2.0',
+ description='Super Next generation Advanced Reactive honEypot',
+ author='MushMush Foundation',
+ author_email='glastopf@public.honeynet.org',
+ url='https://github.com/mushorg/snare',
+ packages=find_packages(exclude=['*.pyc']),
+ scripts=['bin/snare', 'bin/clone'],
+ )
diff --git a/snare.py b/snare.py
deleted file mode 100644
index 5a599d7d..00000000
--- a/snare.py
+++ /dev/null
@@ -1,531 +0,0 @@
-#!/usr/bin/python3
-
-"""
-Copyright (C) 2015-2016 MushMush Foundation
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-"""
-import argparse
-import asyncio
-import configparser
-import grp
-import json
-import mimetypes
-import multiprocessing
-import os
-import pwd
-import sys
-import time
-import uuid
-from concurrent.futures import ProcessPoolExecutor
-from urllib.parse import urlparse, unquote, parse_qsl
-from versions_manager import VersionManager
-import aiohttp
-import git
-import pip
-from aiohttp import MultiDict
-import re
-import logging
-import logger
-
-try:
- from aiohttp.web import StaticResource as StaticRoute
-except ImportError:
- from aiohttp.web import StaticResource
-
-from bs4 import BeautifulSoup
-import cssutils
-import netifaces as ni
-from converter import Converter
-
-
-class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
- def __init__(self, meta, run_args, debug=False, keep_alive=75, **kwargs):
- self.dorks = []
-
- self.run_args = run_args
- self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
-
- self.meta = meta
-
- self.logger = logging.getLogger(__name__)
-
- self.sroute = StaticRoute(
- name=None, prefix='/',
- directory=self.dir
- )
- super().__init__(debug=debug, keep_alive=keep_alive, access_log=None, **kwargs)
-
- async def get_dorks(self):
- dorks = None
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession() as session:
- r = await session.get(
- 'http://{0}:8090/dorks'.format(self.run_args.tanner)
- )
- try:
- dorks = await r.json()
- except json.decoder.JSONDecodeError as e:
- self.logger.error('Error getting dorks: %s', e)
- finally:
- await r.release()
- except asyncio.TimeoutError:
- self.logger.info('Dorks timeout')
- return dorks['response']['dorks'] if dorks else []
-
- async def submit_slurp(self, data):
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
- r = await session.post(
- 'https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}'.format(
- self.run_args.slurp_host, self.run_args.slurp_auth, data
- ), data=json.dumps(data)
- )
- assert r.status == 200
- r.close()
- except Exception as e:
- self.logger.error('Error submitting slurp: %s', e)
-
- def create_data(self, request, response_status):
- data = dict(
- method=None,
- path=None,
- headers=None,
- uuid=snare_uuid.decode('utf-8'),
- peer=None,
- status=response_status
- )
- if self.transport:
- peer = dict(
- ip=self.transport.get_extra_info('peername')[0],
- port=self.transport.get_extra_info('peername')[1]
- )
- data['peer'] = peer
- if request:
- header = {key: value for (key, value) in request.headers.items()}
- data['method'] = request.method
- data['headers'] = header
- data['path'] = request.path
- if ('Cookie' in header):
- data['cookies'] = {cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')}
- return data
-
- async def submit_data(self, data):
- event_result = None
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession() as session:
- r = await session.post(
- 'http://{0}:8090/event'.format(self.run_args.tanner), data=json.dumps(data)
- )
- try:
- event_result = await r.json()
- except json.decoder.JSONDecodeError as e:
- self.logger.error('Error submitting data: {} {}'.format(e, data))
- finally:
- await r.release()
- except Exception as e:
- raise e
- return event_result
-
- async def handle_html_content(self, content):
- soup = BeautifulSoup(content, 'html.parser')
- if self.run_args.no_dorks is not True:
- for p_elem in soup.find_all('p'):
- if p_elem.findChildren():
- continue
- css = None
- if 'style' in p_elem.attrs:
- css = cssutils.parseStyle(p_elem.attrs['style'])
- text_list = p_elem.text.split()
- p_new = soup.new_tag('p', style=css.cssText if css else None)
- for idx, word in enumerate(text_list):
- # Fetch dorks if required
- if len(self.dorks) <= 0:
- self.dorks = await self.get_dorks()
- word += ' '
- if idx % 5 == 0:
- a_tag = soup.new_tag(
- 'a',
- href=self.dorks.pop(),
- style='color:{color};text-decoration:none;cursor:text;'.format(
- color=css.color if css and 'color' in css.keys() else '#000000'
- )
- )
- a_tag.string = word
- p_new.append(a_tag)
- else:
- p_new.append(soup.new_string(word))
- p_elem.replace_with(p_new)
- content = soup.encode('utf-8')
- return content
-
- async def handle_request(self, request, payload):
- self.logger.info('Request path: {0}'.format(request.path))
- data = self.create_data(request, 200)
- if request.method == 'POST':
- post_data = await payload.read()
- post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
- self.logger.info('POST data:')
- for key, val in post_data.items():
- self.logger.info('\t- {0}: {1}'.format(key, val))
- data['post_data'] = dict(post_data)
-
- # Submit the event to the TANNER service
- event_result = await self.submit_data(data)
-
- # Log the event to slurp service if enabled
- if self.run_args.slurp_enabled:
- await self.submit_slurp(request.path)
-
- content, content_type, headers, status_code = await self.parse_tanner_response(
- request.path, event_result['response']['message']['detection'])
- response = aiohttp.Response(
- self.writer, status=status_code, http_version=request.version
- )
- for name, val in headers.items():
- response.add_header(name, val)
-
- response.add_header('Server', self.run_args.server_header)
-
- if 'cookies' in data and 'sess_uuid' in data['cookies']:
- previous_sess_uuid = data['cookies']['sess_uuid']
- else:
- previous_sess_uuid = None
-
- if event_result is not None and ('sess_uuid' in event_result['response']['message']):
- cur_sess_id = event_result['response']['message']['sess_uuid']
- if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id:
- response.add_header('Set-Cookie', 'sess_uuid=' + cur_sess_id)
-
- if not content_type:
- response.add_header('Content-Type', 'text/plain')
- else:
- response.add_header('Content-Type', content_type)
- if content:
- response.add_header('Content-Length', str(len(content)))
- response.send_headers()
- if content:
- response.write(content)
- await response.write_eof()
-
- async def parse_tanner_response(self, requested_name, detection):
- content_type = None
- content = None
- status_code = 200
- headers = {}
- p = re.compile('/+') # Creating a regex object for the pattern of multiple contiguous forward slashes
- requested_name = p.sub('/', requested_name) # Substituting all occurrences of the pattern with single forward slash
-
- if detection['type'] == 1:
- query_start = requested_name.find('?')
- if query_start != -1:
- requested_name = requested_name[:query_start]
-
- if requested_name == '/':
- requested_name = self.run_args.index_page
- try:
- if requested_name[-1] == '/':
- requested_name = requested_name[:-1]
- requested_name = unquote(requested_name)
- file_name = self.meta[requested_name]['hash']
- content_type = self.meta[requested_name]['content_type']
- except KeyError:
- status_code = 404
- requested_name = '/status_404'
- file_name = self.meta[requested_name]['hash']
- content_type = 'text/html'
- path = os.path.join(self.dir, file_name)
- with open(path, 'rb') as fh:
- content = fh.read()
- content = await self.handle_html_content(content)
-
- else:
- path = os.path.join(self.dir, file_name)
- if os.path.isfile(path):
- with open(path, 'rb') as fh:
- content = fh.read()
- if content_type:
- if 'text/html' in content_type:
- content = await self.handle_html_content(content)
-
- elif detection['type'] == 2:
- payload_content = detection['payload']
- if payload_content['page']:
- try:
- file_name = self.meta[payload_content['page']]['hash']
- content_type = self.meta[payload_content['page']]['content_type']
- page_path = os.path.join(self.dir, file_name)
- with open(page_path, encoding='utf-8') as p:
- content = p.read()
- except KeyError:
- content = ''
- content_type = r'text\html'
-
- soup = BeautifulSoup(content, 'html.parser')
- script_tag = soup.new_tag('div')
- script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
- soup.body.append(script_tag)
- content = str(soup).encode()
- else:
- content_type = mimetypes.guess_type(payload_content['value'])[0]
- content = payload_content['value'].encode('utf-8')
-
- if 'headers' in payload_content:
- headers = payload_content['headers']
- else:
- payload_content = detection['payload']
- status_code = payload_content['status_code']
-
- return (content, content_type, headers, status_code)
-
- async def handle_error(self, status=500, message=None,
- payload=None, exc=None, headers=None, reason=None):
-
- data = self.create_data(message, status)
- data['error'] = exc
- await self.submit_data(data)
- super().handle_error(status, message, payload, exc, headers, reason)
-
-
-def create_initial_config():
- cfg = configparser.ConfigParser()
- cfg['WEB-TOOLS'] = dict(google='', bing='')
- with open('/opt/snare/snare.cfg', 'w') as configfile:
- cfg.write(configfile)
-
-
-def snare_setup():
- if os.getuid() != 0:
- print('Snare has to be started as root!')
- sys.exit(1)
- # Create folders
- if not os.path.exists('/opt/snare'):
- os.mkdir('/opt/snare')
- if not os.path.exists('/opt/snare/pages'):
- os.mkdir('/opt/snare/pages')
- # Write pid to pid file
- with open('/opt/snare/snare.pid', 'wb') as pid_fh:
- pid_fh.write(str(os.getpid()).encode('utf-8'))
- # Config file
- if not os.path.exists('/opt/snare/snare.cfg'):
- create_initial_config()
- # Read or create the sensor id
- uuid_file_path = '/opt/snare/snare.uuid'
- if os.path.exists(uuid_file_path):
- with open(uuid_file_path, 'rb') as uuid_fh:
- snare_uuid = uuid_fh.read()
- return snare_uuid
- else:
- with open(uuid_file_path, 'wb') as uuid_fh:
- snare_uuid = str(uuid.uuid4()).encode('utf-8')
- uuid_fh.write(snare_uuid)
- return snare_uuid
-
-
-def drop_privileges():
- uid_name = 'nobody'
- wanted_user = pwd.getpwnam(uid_name)
- gid_name = grp.getgrgid(wanted_user.pw_gid).gr_name
- wanted_group = grp.getgrnam(gid_name)
- os.setgid(wanted_group.gr_gid)
- os.setuid(wanted_user.pw_uid)
- new_user = pwd.getpwuid(os.getuid())
- new_group = grp.getgrgid(os.getgid())
- print('privileges dropped, running as "{}:{}"'.format(new_user.pw_name, new_group.gr_name))
-
-
-def add_meta_tag(page_dir, index_page):
- google_content = config['WEB-TOOLS']['google']
- bing_content = config['WEB-TOOLS']['bing']
-
- if not google_content and not bing_content:
- return
-
- main_page_path = os.path.join('/opt/snare/pages/', page_dir, index_page)
- with open(main_page_path) as main:
- main_page = main.read()
- soup = BeautifulSoup(main_page, 'html.parser')
-
- if (google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None):
- google_meta = soup.new_tag('meta')
- google_meta.attrs['name'] = 'google-site-verification'
- google_meta.attrs['content'] = google_content
- soup.head.append(google_meta)
- if (bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None):
- bing_meta = soup.new_tag('meta')
- bing_meta.attrs['name'] = 'msvalidate.01'
- bing_meta.attrs['content'] = bing_content
- soup.head.append(bing_meta)
-
- html = soup.prettify("utf-8")
- with open(main_page_path, "wb") as file:
- file.write(html)
-
-
-def compare_version_info(timeout):
- while True:
- repo = git.Repo(os.getcwd())
- try:
- rem = repo.remote()
- res = rem.fetch()
- diff_list = res[0].commit.diff(repo.heads.master)
- except TimeoutError:
- print('timeout fetching the repository version')
- else:
- if diff_list:
- print('you are running an outdated version, SNARE will be updated and restarted')
- repo.git.reset('--hard')
- repo.heads.master.checkout()
- repo.git.clean('-xdf')
- repo.remotes.origin.pull()
- pip.main(['install', '-r', 'requirements.txt'])
- os.execv(sys.executable, [sys.executable, __file__] + sys.argv[1:])
- return
- else:
- print('you are running the latest version')
- time.sleep(timeout)
-
-
-def parse_timeout(timeout):
- result = None
- timeouts_coeff = {
- 'M': 60,
- 'H': 3600,
- 'D': 86400
- }
-
- form = timeout[-1]
- if form not in timeouts_coeff.keys():
- print('Bad timeout format, default will be used')
- parse_timeout('24H')
- else:
- result = int(timeout[:-1])
- result *= timeouts_coeff[form]
- return result
-
-
-async def check_tanner():
- vm = VersionManager()
- with aiohttp.ClientSession() as client:
- req_url = 'http://{}:8090/version'.format(args.tanner)
- try:
- resp = await client.get(req_url)
- result = await resp.json()
- version = result["version"]
- vm.check_compatibility(version)
- except aiohttp.errors.ClientOSError:
- print("Can't connect to tanner host {}".format(req_url))
- exit(1)
- else:
- await resp.release()
-
-
-if __name__ == '__main__':
- print(r"""
- _____ _ _____ ____ ______
- / ___// | / / | / __ \/ ____/
- \__ \/ |/ / /| | / /_/ / __/
- ___/ / /| / ___ |/ _, _/ /___
-/____/_/ |_/_/ |_/_/ |_/_____/
-
- """)
- snare_uuid = snare_setup()
- parser = argparse.ArgumentParser()
- page_group = parser.add_mutually_exclusive_group(required=True)
- page_group.add_argument("--page-dir", help="name of the folder to be served")
- page_group.add_argument("--list-pages", help="list available pages", action='store_true')
- parser.add_argument("--index-page", help="file name of the index page", default='index.html')
- parser.add_argument("--port", help="port to listen on", default='8080')
- parser.add_argument("--interface", help="interface to bind to")
- parser.add_argument("--host-ip", help="host ip to bind to", default='localhost')
- parser.add_argument("--debug", help="run web server in debug mode", default=False)
- parser.add_argument("--tanner", help="ip of the tanner service", default='tanner.mushmush.org')
- parser.add_argument("--skip-check-version", help="skip check for update", action='store_true')
- parser.add_argument("--slurp-enabled", help="enable nsq logging", action='store_true')
- parser.add_argument("--slurp-host", help="nsq logging host", default='slurp.mushmush.org')
- parser.add_argument("--slurp-auth", help="nsq logging auth", default='slurp')
- parser.add_argument("--config", help="snare config file", default='snare.cfg')
- parser.add_argument("--auto-update", help="auto update SNARE if new version available ", default=True)
- parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
- parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
- parser.add_argument("--no-dorks", help="disable the use of dorks", action='store_true')
- parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
-
- args = parser.parse_args()
- base_path = '/opt/snare/'
- base_page_path = '/opt/snare/pages/'
- config = configparser.ConfigParser()
- config.read(os.path.join(base_path, args.config))
-
- log_debug = args.log_dir + "snare.log"
- log_err = args.log_dir + "snare.err"
- logger.Logger.create_logger(log_debug, log_err, __package__)
-
- if args.list_pages:
- print('Available pages:\n')
- for page in os.listdir(base_page_path):
- print('\t- {}'.format(page))
- print('\nuse with --page-dir {page_name}\n\n')
- exit()
- full_page_path = os.path.join(base_page_path, args.page_dir)
- if not os.path.exists(full_page_path):
- print("--page-dir: {0} does not exist".format(args.page_dir))
- exit()
- args.index_page = os.path.join("/", args.index_page)
-
- if not os.path.exists(os.path.join(full_page_path, 'meta.json')):
- conv = Converter()
- conv.convert(full_page_path)
- print("pages was converted. Try to clone again for the better result.")
-
- with open(os.path.join(full_page_path, 'meta.json')) as meta:
- meta_info = json.load(meta)
- if not os.path.exists(os.path.join(base_page_path, args.page_dir,
- os.path.join(meta_info[args.index_page]['hash']))):
- print('can\'t create meta tag')
- else:
- add_meta_tag(args.page_dir, meta_info[args.index_page]['hash'])
- loop = asyncio.get_event_loop()
- loop.run_until_complete(check_tanner())
-
- pool = ProcessPoolExecutor(max_workers=multiprocessing.cpu_count())
- compare_version_fut = None
- if args.auto_update is True:
- timeout = parse_timeout(args.update_timeout)
- compare_version_fut = loop.run_in_executor(pool, compare_version_info, timeout)
-
- if args.host_ip == 'localhost' and args.interface:
- host_ip = ni.ifaddresses(args.interface)[2][0]['addr']
- else:
- host_ip = args.host_ip
- future = loop.create_server(
- lambda: HttpRequestHandler(meta_info, args, debug=args.debug, keep_alive=75),
- args.host_ip, int(args.port))
- srv = loop.run_until_complete(future)
-
- drop_privileges()
- print('serving on {0} with uuid {1}'.format(srv.sockets[0].getsockname()[:2], snare_uuid.decode('utf-8')))
- print("Debug logs will be stored in", log_debug)
- print("Error logs will be stored in", log_err)
- print("(Press CTRL+C to quit)")
- try:
- loop.run_forever()
- except (KeyboardInterrupt, TypeError) as e:
- print(e)
- finally:
- if compare_version_fut:
- compare_version_fut.cancel()
- srv.close()
- loop.run_until_complete(srv.wait_closed())
- loop.close()
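
One subtlety in the deleted snare.py worth flagging: parse_timeout() discarded the result of its recursive parse_timeout('24H') call, so a malformed value returned None rather than the advertised 24-hour default. The helper now lives in snare.utils.snare_helpers, which this excerpt does not show, so the following is an assumed fix rather than the project's exact code:

    def parse_timeout(timeout):
        """'90M' -> 5400, '24H' -> 86400, '2D' -> 172800; else default to 24H."""
        timeouts_coeff = {'M': 60, 'H': 3600, 'D': 86400}
        form = timeout[-1]
        if form not in timeouts_coeff:
            print('Bad timeout format, default will be used')
            return parse_timeout('24H')  # actually propagate the default
        return int(timeout[:-1]) * timeouts_coeff[form]

    assert parse_timeout('90M') == 5400
    assert parse_timeout('bogus') == 86400  # no longer None
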
diff --git a/snare/__init__.py b/snare/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/clone.py b/snare/cloner.py
similarity index 66%
rename from clone.py
rename to snare/cloner.py
index c7b1e0fc..b1632c0b 100644
--- a/clone.py
+++ b/snare/cloner.py
@@ -1,40 +1,21 @@
-#!/usr/bin/env python3
-
-"""
-Copyright (C) 2015-2016 MushMush Foundation
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-"""
-
-import argparse
+import os
+import sys
+import logging
import asyncio
+from asyncio import Queue
import hashlib
import json
-import os
import re
-import sys
-from asyncio import Queue
-
import aiohttp
import cssutils
import yarl
from bs4 import BeautifulSoup
-import logger
-import logging
class Cloner(object):
def __init__(self, root, max_depth, css_validate):
self.visited_urls = []
- self.root, self.error_page = self.add_scheme(root)
+ self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
self.moved_root = None
if len(self.root.host) < 4:
@@ -42,7 +23,7 @@ def __init__(self, root, max_depth, css_validate):
self.target_path = '/opt/snare/pages/{}'.format(self.root.host)
if not os.path.exists(self.target_path):
- os.mkdir(self.target_path)
+ os.mkdir(self.target_path)
self.css_validate = css_validate
self.new_urls = Queue()
self.meta = {}
@@ -148,11 +129,10 @@ async def get_body(self, session):
data = None
content_type = None
try:
- with aiohttp.Timeout(10.0):
- response = await session.get(current_url, headers={'Accept': 'text/html'})
- content_type = response.content_type
- data = await response.read()
-
+ response = await session.get(current_url, headers={'Accept': 'text/html'}, timeout=10.0)
+ content_type = response.content_type
+ data = await response.read()
+
except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
self.logger.error(client_error)
else:
@@ -165,8 +145,8 @@ async def get_body(self, session):
data = str(soup).encode()
with open(os.path.join(self.target_path, hash_name), 'wb') as index_fh:
index_fh.write(data)
- if content_type == 'text/css':
- css = cssutils.parseString(data, validate=self.css_validate)
+ if content_type == 'text/css':
+ css = cssutils.parseString(data, validate=self.css_validate)
for carved_url in cssutils.getUrls(css):
if carved_url.startswith('data'):
continue
@@ -178,20 +158,20 @@ async def get_body(self, session):
async def get_root_host(self):
try:
- with aiohttp.ClientSession() as session:
+ async with aiohttp.ClientSession() as session:
resp = await session.get(self.root)
- if resp._url_obj.host != self.root.host:
+ if resp.host != self.root.host:
self.moved_root = resp._url_obj
resp.close()
- except aiohttp.errors.ClientError as err:
- self.logger.error("Can\'t connect to target host.")
+ except aiohttp.ClientError as err:
+ self.logger.error("Can\'t connect to target host: %s", err)
exit(-1)
async def run(self):
session = aiohttp.ClientSession()
try:
await self.new_urls.put((self.root, 0))
- await self.new_urls.put((self.error_page,0))
+ await self.new_urls.put((self.error_page, 0))
await self.get_body(session)
except KeyboardInterrupt:
raise
@@ -199,51 +179,3 @@ async def run(self):
with open(os.path.join(self.target_path, 'meta.json'), 'w') as mj:
json.dump(self.meta, mj)
await session.close()
-
-def str_to_bool(v):
- if v.lower() == 'true':
- return True
- elif v.lower() == 'false':
- return False
- else:
- raise argparse.ArgumentTypeError('Boolean value expected')
-
-def main():
- if os.getuid() != 0:
- print('Clone has to be run as root!')
- sys.exit(1)
- if not os.path.exists('/opt/snare'):
- os.mkdir('/opt/snare')
- if not os.path.exists('/opt/snare/pages'):
- os.mkdir('/opt/snare/pages')
- loop = asyncio.get_event_loop()
- parser = argparse.ArgumentParser()
- parser.add_argument("--target", help="domain of the site to be cloned", required=True)
- parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
- parser.add_argument("--log_path", help="path to the error log file")
- parser.add_argument("--css-validate", help="set whether css validation is required", type=str_to_bool, default=None)
- args = parser.parse_args()
- if args.log_path:
- log_err = args.log_path + "clone.err"
- else:
- log_err = "/opt/snare/clone.err"
- logger.Logger.create_clone_logger(log_err, __package__)
- print("Error logs will be stored in {}\n".format(log_err))
- try:
- cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
- loop.run_until_complete(cloner.get_root_host())
- loop.run_until_complete(cloner.run())
- except KeyboardInterrupt:
- pass
-
-
-if __name__ == '__main__':
- print("""
- ______ __ ______ _ ____________
- / ____// / / __ // | / / ____/ __ \\
- / / / / / / / // |/ / __/ / /_/ /
- / /___ / /____ / /_/ // /| / /___/ _, _/
-/_____//______//_____//_/ |_/_____/_/ |_|
-
- """)
- main()
diff --git a/snare/html_handler.py b/snare/html_handler.py
new file mode 100644
index 00000000..73f2c443
--- /dev/null
+++ b/snare/html_handler.py
@@ -0,0 +1,63 @@
+import asyncio
+import json
+import logging
+from bs4 import BeautifulSoup
+import cssutils
+import aiohttp
+
+
+class HtmlHandler():
+ def __init__(self, no_dorks, tanner):
+ self.no_dorks = no_dorks
+ self.dorks = []
+ self.logger = logging.getLogger(__name__)
+ self.tanner = tanner
+
+ async def get_dorks(self):
+ dorks = None
+ try:
+ async with aiohttp.ClientSession() as session:
+ r = await session.get(
+ 'http://{0}:8090/dorks'.format(self.tanner), timeout=10.0
+ )
+ try:
+ dorks = await r.json()
+ except json.decoder.JSONDecodeError as e:
+ self.logger.error('Error getting dorks: %s', e)
+ finally:
+ await r.release()
+ except asyncio.TimeoutError:
+ self.logger.info('Dorks timeout')
+ return dorks['response']['dorks'] if dorks else []
+
+ async def handle_content(self, content):
+ soup = BeautifulSoup(content, 'html.parser')
+ if self.no_dorks is not True:
+ for p_elem in soup.find_all('p'):
+ if p_elem.findChildren():
+ continue
+ css = None
+ if 'style' in p_elem.attrs:
+ css = cssutils.parseStyle(p_elem.attrs['style'])
+ text_list = p_elem.text.split()
+ p_new = soup.new_tag('p', style=css.cssText if css else None)
+ for idx, word in enumerate(text_list):
+ # Fetch dorks if required
+ if len(self.dorks) <= 0:
+ self.dorks = await self.get_dorks()
+ word += ' '
+ if idx % 5 == 0:
+ a_tag = soup.new_tag(
+ 'a',
+ href=self.dorks.pop(),
+ style='color:{color};text-decoration:none;cursor:text;'.format(
+ color=css.color if css and 'color' in css.keys() else '#000000'
+ )
+ )
+ a_tag.string = word
+ p_new.append(a_tag)
+ else:
+ p_new.append(soup.new_string(word))
+ p_elem.replace_with(p_new)
+ content = soup.encode('utf-8')
+ return content
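
HtmlHandler.handle_content rewrites every childless <p>: the text is split into words, and every fifth word is wrapped in an <a> whose href is a dork and whose style is copied from the paragraph, so the links stay crawlable but visually invisible. An offline usage sketch, assuming the snare package from this patch is installed; the dork list is pre-seeded so get_dorks() never contacts TANNER:

    import asyncio
    from snare.html_handler import HtmlHandler

    handler = HtmlHandler(no_dorks=False, tanner='tanner.mushmush.org')
    handler.dorks = ['/?q=dork{}'.format(i) for i in range(100)]  # skip fetch

    html = b'<html><body><p>one two three four five six</p></body></html>'
    loop = asyncio.get_event_loop()
    out = loop.run_until_complete(handler.handle_content(html))
    print(out.decode())  # 'one' and 'six' come back wrapped in styled <a> tags
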
diff --git a/snare/middlewares.py b/snare/middlewares.py
new file mode 100644
index 00000000..a8875e3c
--- /dev/null
+++ b/snare/middlewares.py
@@ -0,0 +1,38 @@
+import aiohttp_jinja2
+from aiohttp import web
+
+
+class SnareMiddleware():
+
+ def __init__(self, file_name):
+ self.error_404 = file_name
+
+ async def handle_404(self, request):
+ return aiohttp_jinja2.render_template(self.error_404, request, {})
+
+ async def handle_500(self, request):
+ return aiohttp_jinja2.render_template('500.html', request, {})
+
+ def create_error_middleware(self, overrides):
+
+ @web.middleware
+ async def error_middleware(request, handler):
+ try:
+ response = await handler(request)
+ override = overrides.get(response.status)
+ if override:
+ return await override(request)
+ return response
+ except web.HTTPException as ex:
+ override = overrides.get(ex.status)
+ if override:
+ return await override(request)
+ raise
+ return error_middleware
+
+ def setup_middlewares(self, app):
+ error_middleware = self.create_error_middleware({
+ 404: self.handle_404,
+ 500: self.handle_500
+ })
+ app.middlewares.append(error_middleware)
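
This is the standard aiohttp middleware-factory pattern: error_middleware closes over an overrides dict mapping status codes to replacement handlers, and it intercepts both error statuses that are returned and HTTPExceptions that are raised. A minimal standalone version, with a plain-text handler in place of the jinja2 template:

    from aiohttp import web

    async def handle_404(request):
        return web.Response(text='custom 404 page', status=404)

    def create_error_middleware(overrides):
        @web.middleware
        async def error_middleware(request, handler):
            try:
                response = await handler(request)
                override = overrides.get(response.status)
                if override:
                    return await override(request)
                return response
            except web.HTTPException as ex:
                override = overrides.get(ex.status)
                if override:
                    return await override(request)
                raise
        return error_middleware

    app = web.Application()
    app.middlewares.append(create_error_middleware({404: handle_404}))
    # web.run_app(app)  # any unmatched route now renders the custom 404 body
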
diff --git a/snare/server.py b/snare/server.py
new file mode 100644
index 00000000..d9cf1cab
--- /dev/null
+++ b/snare/server.py
@@ -0,0 +1,93 @@
+import logging
+import json
+import aiohttp
+from aiohttp import web
+from aiohttp.web import StaticResource as StaticRoute
+import multidict
+import aiohttp_jinja2
+import jinja2
+from snare.middlewares import SnareMiddleware
+from snare.tanner_handler import TannerHandler
+
+
+class HttpRequestHandler():
+ def __init__(self, meta, run_args, snare_uuid, debug=False, keep_alive=75, **kwargs):
+ self.run_args = run_args
+ self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
+ self.meta = meta
+ self.snare_uuid = snare_uuid
+ self.logger = logging.getLogger(__name__)
+ self.sroute = StaticRoute(
+ name=None, prefix='/',
+ directory=self.dir
+ )
+ self.tanner_handler = TannerHandler(run_args, meta, snare_uuid)
+
+ async def submit_slurp(self, data):
+ try:
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+ r = await session.post(
+ 'https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}'.format(
+ self.run_args.slurp_host, self.run_args.slurp_auth, data
+ ), data=json.dumps(data), timeout=10.0
+ )
+ assert r.status == 200
+ r.close()
+ except Exception as e:
+ self.logger.error('Error submitting slurp: %s', e)
+
+ async def handle_request(self, request):
+ self.logger.info('Request path: {0}'.format(request.path))
+ data = self.tanner_handler.create_data(request, 200)
+ if request.method == 'POST':
+ post_data = await request.post()
+ self.logger.info('POST data:')
+ for key, val in post_data.items():
+ self.logger.info('\t- {0}: {1}'.format(key, val))
+ data['post_data'] = dict(post_data)
+
+ # Submit the event to the TANNER service
+ event_result = await self.tanner_handler.submit_data(data)
+
+ # Log the event to slurp service if enabled
+ if self.run_args.slurp_enabled:
+ await self.submit_slurp(request.path)
+
+ content, content_type, headers, status_code = await self.tanner_handler.parse_tanner_response(
+ request.path, event_result['response']['message']['detection'])
+
+ response_headers = multidict.CIMultiDict()
+
+ for name, val in headers.items():
+ response_headers.add(name, val)
+
+ response_headers.add('Server', self.run_args.server_header)
+
+ if 'cookies' in data and 'sess_uuid' in data['cookies']:
+ previous_sess_uuid = data['cookies']['sess_uuid']
+ else:
+ previous_sess_uuid = None
+
+ if event_result is not None and ('sess_uuid' in event_result['response']['message']):
+ cur_sess_id = event_result['response']['message']['sess_uuid']
+ if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id:
+ response_headers.add('Set-Cookie', 'sess_uuid=' + cur_sess_id)
+
+ if not content_type:
+ response_content_type = 'text/plain'
+ else:
+ response_content_type = content_type
+ response = web.Response(
+ body=content, status=status_code, headers=response_headers, content_type=response_content_type
+ )
+ return response
+
+ def start(self):
+ app = web.Application()
+ app.add_routes([web.route('*', '/{tail:.*}', self.handle_request)])
+ aiohttp_jinja2.setup(
+ app, loader=jinja2.FileSystemLoader(self.dir)
+ )
+ middleware = SnareMiddleware(self.meta['/status_404']['hash'])
+ middleware.setup_middlewares(app)
+ web.run_app(app, host=self.run_args.host_ip, port=self.run_args.port)
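[Note] start() assumes meta.json (built by the cloner) contains a '/status_404' entry whose hash names the captured 404 page on disk. A sketch of that mapping; the hash values are invented placeholders:

    # Shape of self.meta that start() depends on.
    meta = {
        '/index.html': {'hash': 'd1d3c5cf9c1f...', 'content_type': 'text/html'},
        '/status_404': {'hash': '9f86d081884c...', 'content_type': 'text/html'},
    }
    error_template = meta['/status_404']['hash']  # file rendered by SnareMiddleware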
diff --git a/snare/tanner_handler.py b/snare/tanner_handler.py
new file mode 100644
index 00000000..a6f8298f
--- /dev/null
+++ b/snare/tanner_handler.py
@@ -0,0 +1,126 @@
+import re
+import os
+from urllib.parse import unquote
+import mimetypes
+import json
+import logging
+import aiohttp
+from bs4 import BeautifulSoup
+from snare.html_handler import HtmlHandler
+
+
+class TannerHandler():
+ def __init__(self, run_args, meta, snare_uuid):
+ self.run_args = run_args
+ self.meta = meta
+ self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
+ self.snare_uuid = snare_uuid
+ self.html_handler = HtmlHandler(run_args.no_dorks, run_args.tanner)
+ self.logger = logging.getLogger(__name__)
+
+ def create_data(self, request, response_status):
+ data = dict(
+ method=None,
+ path=None,
+ headers=None,
+ uuid=self.snare_uuid.decode('utf-8'),
+ peer=None,
+ status=response_status
+ )
+ if request.transport:
+ peer = dict(
+ ip=request.transport.get_extra_info('peername')[0],
+ port=request.transport.get_extra_info('peername')[1]
+ )
+ data['peer'] = peer
+ if request.path:
+ header = {key: value for (key, value) in request.headers.items()}
+ data['method'] = request.method
+ data['headers'] = header
+ data['path'] = request.path
+ if ('Cookie' in header):
+ data['cookies'] = {
+ cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')
+ }
+ return data
+
+ async def submit_data(self, data):
+ event_result = None
+ try:
+ async with aiohttp.ClientSession() as session:
+ r = await session.post(
+ 'http://{0}:8090/event'.format(self.run_args.tanner), data=json.dumps(data),
+ timeout=10.0
+ )
+ try:
+ event_result = await r.json()
+ except json.decoder.JSONDecodeError as e:
+ self.logger.error('Error submitting data: {} {}'.format(e, data))
+ finally:
+ await r.release()
+ except Exception as e:
+ raise e
+ return event_result
+
+ async def parse_tanner_response(self, requested_name, detection):
+ content_type = None
+ content = None
+ status_code = 200
+ headers = {}
+ # Creating a regex object for the pattern of multiple contiguous forward slashes
+ p = re.compile('/+')
+ # Substituting all occurrences of the pattern with single forward slash
+ requested_name = p.sub('/', requested_name)
+
+ if detection['type'] == 1:
+ query_start = requested_name.find('?')
+ if query_start != -1:
+ requested_name = requested_name[:query_start]
+ if requested_name == '/':
+ requested_name = self.run_args.index_page
+ try:
+ if requested_name[-1] == '/':
+ requested_name = requested_name[:-1]
+ requested_name = unquote(requested_name)
+ file_name = self.meta[requested_name]['hash']
+ content_type = self.meta[requested_name]['content_type']
+ except KeyError:
+ status_code = 404
+ else:
+ path = os.path.join(self.dir, file_name)
+ if os.path.isfile(path):
+ with open(path, 'rb') as fh:
+ content = fh.read()
+ if content_type:
+ if 'text/html' in content_type:
+ content = await self.html_handler.handle_content(content)
+
+ elif detection['type'] == 2:
+ payload_content = detection['payload']
+ if payload_content['page']:
+ try:
+ file_name = self.meta[payload_content['page']]['hash']
+ content_type = self.meta[payload_content['page']]['content_type']
+ page_path = os.path.join(self.dir, file_name)
+ with open(page_path, encoding='utf-8') as p:
+ content = p.read()
+ except KeyError:
+ content = ''
+                    content_type = 'text/html'
+
+ soup = BeautifulSoup(content, 'html.parser')
+ script_tag = soup.new_tag('div')
+ script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
+ soup.body.append(script_tag)
+ content = str(soup).encode()
+ else:
+ content_type = mimetypes.guess_type(payload_content['value'])[0]
+ content = payload_content['value'].encode('utf-8')
+
+ if 'headers' in payload_content:
+ headers = payload_content['headers']
+ else:
+ payload_content = detection['payload']
+ status_code = payload_content['status_code']
+
+ return (content, content_type, headers, status_code)
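[Note] The detection dicts consumed above come from TANNER. Their shapes, as inferred from the three branches of parse_tanner_response (a sketch, not the full TANNER contract):

    serve_cloned = {'type': 1}                    # first branch: serve the cloned file
    inject_page = {'type': 2,                     # second branch: inject into a page
                   'payload': {'page': '/index.html', 'value': '<div>injected</div>'}}
    raw_value = {'type': 2,                       # second branch, empty page: raw value
                 'payload': {'page': '', 'value': 'plain body',
                             'headers': {'X-Injected': '1'}}}
    forced_status = {'type': 3,                   # else branch: forced status code
                     'payload': {'status_code': 503}}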
diff --git a/snare/tests/__init__.py b/snare/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/test_add_meta_tag.py b/snare/tests/test_add_meta_tag.py
similarity index 100%
rename from tests/test_add_meta_tag.py
rename to snare/tests/test_add_meta_tag.py
diff --git a/tests/test_converter.py b/snare/tests/test_converter.py
similarity index 100%
rename from tests/test_converter.py
rename to snare/tests/test_converter.py
diff --git a/tests/test_create_data.py b/snare/tests/test_create_data.py
similarity index 100%
rename from tests/test_create_data.py
rename to snare/tests/test_create_data.py
diff --git a/tests/test_get_dorks.py b/snare/tests/test_get_dorks.py
similarity index 100%
rename from tests/test_get_dorks.py
rename to snare/tests/test_get_dorks.py
diff --git a/tests/test_handle_error.py b/snare/tests/test_handle_error.py
similarity index 100%
rename from tests/test_handle_error.py
rename to snare/tests/test_handle_error.py
diff --git a/tests/test_handle_html_content.py b/snare/tests/test_handle_html_content.py
similarity index 100%
rename from tests/test_handle_html_content.py
rename to snare/tests/test_handle_html_content.py
diff --git a/tests/test_handle_request.py b/snare/tests/test_handle_request.py
similarity index 100%
rename from tests/test_handle_request.py
rename to snare/tests/test_handle_request.py
diff --git a/tests/test_parse_tanner_response.py b/snare/tests/test_parse_tanner_response.py
similarity index 100%
rename from tests/test_parse_tanner_response.py
rename to snare/tests/test_parse_tanner_response.py
diff --git a/tests/test_submit_data.py b/snare/tests/test_submit_data.py
similarity index 100%
rename from tests/test_submit_data.py
rename to snare/tests/test_submit_data.py
diff --git a/tests/test_versions_manager.py b/snare/tests/test_versions_manager.py
similarity index 100%
rename from tests/test_versions_manager.py
rename to snare/tests/test_versions_manager.py
diff --git a/snare/utils/__init__.py b/snare/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/asyncmock.py b/snare/utils/asyncmock.py
similarity index 100%
rename from utils/asyncmock.py
rename to snare/utils/asyncmock.py
diff --git a/logger.py b/snare/utils/logger.py
similarity index 81%
rename from logger.py
rename to snare/utils/logger.py
index e0b146e1..3778f988 100644
--- a/logger.py
+++ b/snare/utils/logger.py
@@ -1,6 +1,7 @@
import logging
import logging.handlers
+
class LevelFilter(logging.Filter):
"""Filters (lets through) all messages with level < LEVEL"""
@@ -8,24 +9,27 @@ def __init__(self, level):
self.level = level
def filter(self, record):
- return record.levelno < self.level # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+ return record.levelno < self.level
+ # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+
class Logger:
-
+
@staticmethod
def create_logger(debug_filename, err_filename, logger_name):
logger = logging.getLogger(logger_name)
logger.setLevel(logging.DEBUG)
logger.propagate = False
formatter = logging.Formatter(
- fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
-
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S'
+ )
+
# ERROR log to 'snare.err'
error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(formatter)
logger.addHandler(error_log_handler)
-
+
# DEBUG log to 'snare.log'
debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding='utf-8')
debug_log_handler.setLevel(logging.DEBUG)
@@ -33,17 +37,17 @@ def create_logger(debug_filename, err_filename, logger_name):
max_level_filter = LevelFilter(logging.ERROR)
debug_log_handler.addFilter(max_level_filter)
logger.addHandler(debug_log_handler)
-
+
return logger
-
+
@staticmethod
def create_clone_logger(err_filename, logger_name):
logger = logging.getLogger(logger_name)
formatter = logging.Formatter(
- fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S'
+ )
# ERROR log to 'clone.err'
error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(formatter)
logger.addHandler(error_log_handler)
-
\ No newline at end of file
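[Note] A minimal usage sketch for the module above, assuming /opt/snare is writable:

    from snare.utils.logger import Logger

    log = Logger.create_logger('/opt/snare/snare.log', '/opt/snare/snare.err', 'snare')
    log.debug('written to snare.log only')   # below ERROR, so the .err handler drops it
    log.error('written to snare.err only')   # LevelFilter keeps ERROR out of snare.log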
diff --git a/utils/page_path_generator.py b/snare/utils/page_path_generator.py
similarity index 100%
rename from utils/page_path_generator.py
rename to snare/utils/page_path_generator.py
diff --git a/snare/utils/snare_helpers.py b/snare/utils/snare_helpers.py
new file mode 100644
index 00000000..ffebd940
--- /dev/null
+++ b/snare/utils/snare_helpers.py
@@ -0,0 +1,105 @@
+import os
+from os import walk
+import hashlib
+import mimetypes
+import json
+import shutil
+import argparse
+from distutils.version import StrictVersion
+from bs4 import BeautifulSoup
+
+
+class VersionManager:
+ def __init__(self):
+ self.version = "0.2.0"
+ self.version_mapper = {
+ "0.1.0": ["0.1.0", "0.4.0"],
+ "0.2.0": ["0.5.0", "0.5.0"]
+ }
+
+ def check_compatibility(self, tanner_version):
+ min_version = self.version_mapper[self.version][0]
+ max_version = self.version_mapper[self.version][1]
+ if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)):
+ raise RuntimeError("Wrong tanner version: {}. Compatible versions are {} - {}"
+ .format(tanner_version, min_version, max_version))
+
+
+class Converter:
+ def __init__(self):
+ self.meta = {}
+
+ def convert(self, path):
+ files_to_convert = []
+
+ for (dirpath, dirnames, filenames) in walk(path):
+ for fn in filenames:
+ files_to_convert.append(os.path.join(dirpath, fn))
+
+ for fn in files_to_convert:
+ path_len = len(path)
+ file_name = fn[path_len:]
+ m = hashlib.md5()
+ m.update(fn.encode('utf-8'))
+ hash_name = m.hexdigest()
+ self.meta[file_name] = {'hash': hash_name, 'content_type': mimetypes.guess_type(file_name)[0]}
+ shutil.copyfile(fn, os.path.join(path, hash_name))
+ os.remove(fn)
+
+ with open(os.path.join(path, 'meta.json'), 'w') as mj:
+ json.dump(self.meta, mj)
+
+
+def add_meta_tag(page_dir, index_page, config):
+ google_content = config['WEB-TOOLS']['google']
+ bing_content = config['WEB-TOOLS']['bing']
+
+ if not google_content and not bing_content:
+ return
+
+ main_page_path = os.path.join('/opt/snare/pages/', page_dir, index_page)
+ with open(main_page_path) as main:
+ main_page = main.read()
+ soup = BeautifulSoup(main_page, 'html.parser')
+
+ if (google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None):
+ google_meta = soup.new_tag('meta')
+ google_meta.attrs['name'] = 'google-site-verification'
+ google_meta.attrs['content'] = google_content
+ soup.head.append(google_meta)
+ if (bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None):
+ bing_meta = soup.new_tag('meta')
+ bing_meta.attrs['name'] = 'msvalidate.01'
+ bing_meta.attrs['content'] = bing_content
+ soup.head.append(bing_meta)
+
+ html = soup.prettify("utf-8")
+ with open(main_page_path, "wb") as file:
+ file.write(html)
+
+
+def parse_timeout(timeout):
+ result = None
+ timeouts_coeff = {
+ 'M': 60,
+ 'H': 3600,
+ 'D': 86400
+ }
+
+ form = timeout[-1]
+ if form not in timeouts_coeff.keys():
+ print('Bad timeout format, default will be used')
+        result = parse_timeout('24H')
+ else:
+ result = int(timeout[:-1])
+ result *= timeouts_coeff[form]
+ return result
+
+
+def str_to_bool(v):
+ if v.lower() == 'true':
+ return True
+ elif v.lower() == 'false':
+ return False
+ else:
+ raise argparse.ArgumentTypeError('Boolean value expected')
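[Note] A quick behaviour sketch for the two helpers above (with the recursive call's result now assigned in parse_timeout, the 24H fallback actually reaches the caller instead of returning None):

    parse_timeout('30M')   # 30 * 60    -> 1800
    parse_timeout('2H')    # 2 * 3600   -> 7200
    parse_timeout('1D')    # 1 * 86400  -> 86400
    parse_timeout('90X')   # bad unit   -> prints a warning, returns 86400 (24H default)
    str_to_bool('True')    # -> True; 'false' -> False; anything else raises
                           #    argparse.ArgumentTypeError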
diff --git a/versions_manager.py b/versions_manager.py
deleted file mode 100644
index fba8b410..00000000
--- a/versions_manager.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from distutils.version import StrictVersion
-
-
-class VersionManager:
- def __init__(self):
- self.version = "0.2.0"
- self.version_mapper = {
- "0.1.0": ["0.1.0", "0.4.0"],
- "0.2.0": ["0.5.0", "0.5.0"]
- }
-
- def check_compatibility(self, tanner_version):
- min_version = self.version_mapper[self.version][0]
- max_version = self.version_mapper[self.version][1]
- if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)):
- raise RuntimeError("Wrong tanner version: {}. Compatible versions are {} - {}"
- .format(tanner_version, min_version, max_version))
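[Note] The relocated class keeps the same behaviour; a usage sketch against the version mapping above:

    from snare.utils.snare_helpers import VersionManager

    vm = VersionManager()                # snare version 0.2.0
    vm.check_compatibility('0.5.0')      # ok: within ['0.5.0', '0.5.0']
    vm.check_compatibility('0.4.0')      # raises RuntimeError: outside the range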
From 83b36c68a4e197f9165189b2ee50e388f75aedef Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Tue, 5 Jun 2018 00:22:13 +0530
Subject: [PATCH 05/13] Rewrite tests (#142)
* Rewriting tests after latest snare update
* Update requirements.txt
* Update requirements.txt
* Update .travis.yml
* error fixes
* Minor Change
---
.travis.yml | 2 +-
requirements.txt | 5 +-
snare/tests/test_add_meta_tag.py | 12 ++--
snare/tests/test_converter.py | 3 +-
snare/tests/test_create_data.py | 52 ++++++++------
snare/tests/test_get_dorks.py | 34 +++++----
snare/tests/test_handle_error.py | 70 ------------------
snare/tests/test_handle_html_content.py | 39 ++++------
snare/tests/test_handle_request.py | 87 +++++++++++------------
snare/tests/test_parse_tanner_response.py | 41 ++++++-----
snare/tests/test_submit_data.py | 30 ++++----
snare/tests/test_versions_manager.py | 2 +-
12 files changed, 155 insertions(+), 222 deletions(-)
delete mode 100644 snare/tests/test_handle_error.py
diff --git a/.travis.yml b/.travis.yml
index faf78826..09f04e02 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,5 +5,5 @@ python:
install: "pip install -r requirements.txt"
# command to run tests
script:
- - nosetests -w ./tests -vv
+ - nosetests -w snare/tests -vv
- pycodestyle . --max-line-length=120
diff --git a/requirements.txt b/requirements.txt
index 33f64ad3..6720e057 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,9 @@
-aiohttp<2.0
+aiohttp
+aiohttp_jinja2
+yarl
beautifulsoup4
cssutils
gitpython
netifaces
-yarl==0.9.8
python-magic
pycodestyle
diff --git a/snare/tests/test_add_meta_tag.py b/snare/tests/test_add_meta_tag.py
index fe87facd..df28b88a 100644
--- a/snare/tests/test_add_meta_tag.py
+++ b/snare/tests/test_add_meta_tag.py
@@ -1,10 +1,10 @@
import unittest
import os
-from bs4 import BeautifulSoup
-import snare
import shutil
import configparser
-from utils.page_path_generator import generate_unique_path
+from bs4 import BeautifulSoup
+from snare.utils.snare_helpers import add_meta_tag
+from snare.utils.page_path_generator import generate_unique_path
class TestAddMetaTag(unittest.TestCase):
@@ -19,9 +19,9 @@ def setUp(self):
f.write(self.content)
def test_add_meta_tag(self):
- snare.config = configparser.ConfigParser()
- snare.config['WEB-TOOLS'] = dict(google='test google content', bing='test bing content')
- snare.add_meta_tag(self.page_dir, self.index_page)
+ config = configparser.ConfigParser()
+ config['WEB-TOOLS'] = dict(google='test google content', bing='test bing content')
+ add_meta_tag(self.page_dir, self.index_page, config)
with open(os.path.join(self.main_page_path, 'index.html')) as main:
main_page = main.read()
soup = BeautifulSoup(main_page, 'html.parser')
diff --git a/snare/tests/test_converter.py b/snare/tests/test_converter.py
index 21c051a1..20dfedc2 100644
--- a/snare/tests/test_converter.py
+++ b/snare/tests/test_converter.py
@@ -1,9 +1,8 @@
import unittest
import os
-import sys
import shutil
import json
-from converter import Converter
+from snare.utils.snare_helpers import Converter
class TestConverter(unittest.TestCase):
diff --git a/snare/tests/test_create_data.py b/snare/tests/test_create_data.py
index b27212e3..a07ab0f0 100644
--- a/snare/tests/test_create_data.py
+++ b/snare/tests/test_create_data.py
@@ -1,38 +1,49 @@
import unittest
from unittest.mock import Mock
-import asyncio
-import argparse
-import aiohttp
import shutil
import os
-import json
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-import snare
-from utils.page_path_generator import generate_unique_path
+import asyncio
+import argparse
+from yarl import URL
+from aiohttp import HttpVersion
+from aiohttp import web
+from aiohttp.http_parser import RawRequestMessage
+from snare.tanner_handler import TannerHandler
+from snare.utils.page_path_generator import generate_unique_path
-class TestHandleRequest(unittest.TestCase):
+class TestCreateData(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- snare.snare_uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.headers = {
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ args = run_args.parse_args(['--page-dir', page_dir])
+ snare_uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
+ args.no_dorks = True
+ self.handler = TannerHandler(args, meta, snare_uuid)
+ headers = {
'Host': 'test_host', 'status': 200,
'Cookie': 'sess_uuid=prev_test_uuid; test_cookie=test'
}
- self.request = aiohttp.protocol.RawRequestMessage(
- method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=self.headers,
- raw_headers=None, should_close=None, compression=None)
+ message = RawRequestMessage(
+ method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=headers,
+ raw_headers=None, should_close=None, compression=None, upgrade=None, chunked=None,
+ url=URL('http://test_url/')
+ )
+ loop = asyncio.get_event_loop()
+ RequestHandler = Mock()
+ protocol = RequestHandler()
+ self.request = web.Request(
+ message=message, payload=None, protocol=protocol, payload_writer=None,
+ task='POST', loop=loop
+ )
+ self.request.transport.get_extra_info = Mock(return_value=(['test_ip', 'test_port']))
self.response_status = "test_status"
+ self.data = None
self.expected_data = {
'method': 'POST', 'path': '/',
'headers': {'Host': 'test_host', 'status': 200,
@@ -42,9 +53,6 @@ def setUp(self):
'status': 'test_status',
'cookies': {'sess_uuid': 'prev_test_uuid', ' test_cookie': 'test'}
}
- asyncio.BaseTransport = Mock()
- self.handler.transport = asyncio.BaseTransport()
- self.handler.transport.get_extra_info = Mock(return_value=['test_ip', 'test_port'])
def test_create_data(self):
self.data = self.handler.create_data(self.request, self.response_status)
diff --git a/snare/tests/test_get_dorks.py b/snare/tests/test_get_dorks.py
index ccddcc99..4fe98d72 100644
--- a/snare/tests/test_get_dorks.py
+++ b/snare/tests/test_get_dorks.py
@@ -1,33 +1,31 @@
import unittest
-from unittest.mock import Mock
import asyncio
-import argparse
-import aiohttp
import shutil
-import yarl
import os
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import yarl
+import aiohttp
+from snare.utils.asyncmock import AsyncMock
+from snare.html_handler import HtmlHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestGetDorks(unittest.TestCase):
def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
self.dorks = dict(response={'dorks': "test_dorks"})
self.loop = asyncio.new_event_loop()
aiohttp.ClientSession.get = AsyncMock(
- return_value=aiohttp.ClientResponse(url=yarl.URL("http://www.example.com"), method="GET")
- )
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.tanner = "tanner.mushmush.org"
+ return_value=aiohttp.ClientResponse(
+ url=yarl.URL("http://www.example.com"), method="GET", writer=None, continue100=1,
+ timer=None, request_info=None, traces=None, loop=self.loop,
+ session=None
+ )
+ )
+ no_dorks = True
+ tanner = "tanner.mushmush.org"
+ self.handler = HtmlHandler(no_dorks, tanner)
+ self.data = None
def test_get_dorks(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=dict(response={'dorks': "test_dorks"}))
@@ -35,7 +33,7 @@ def test_get_dorks(self):
async def test():
self.data = await self.handler.get_dorks()
self.loop.run_until_complete(test())
- aiohttp.ClientSession.get.assert_called_with('http://tanner.mushmush.org:8090/dorks')
+ aiohttp.ClientSession.get.assert_called_with('http://tanner.mushmush.org:8090/dorks', timeout=10.0)
def test_return_dorks(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=self.dorks)
diff --git a/snare/tests/test_handle_error.py b/snare/tests/test_handle_error.py
deleted file mode 100644
index bedf8497..00000000
--- a/snare/tests/test_handle_error.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import unittest
-from unittest.mock import Mock
-import asyncio
-import argparse
-import aiohttp
-import shutil
-import os
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
-
-
-class TestHandleError(unittest.TestCase):
- def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
- self.main_page_path = generate_unique_path()
- os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- self.loop = asyncio.new_event_loop()
- self.status = 500
- self.message = "test"
- self.payload = "test"
- self.exc = "[Errno 0] test"
- self.headers = "test"
- self.reason = "test"
- self.data = dict(
- method='GET',
- path='/',
- headers="test_headers",
- uuid="test_uuid",
- peer="test_peer",
- status="test_status",
- error=self.exc
- )
- aiohttp.server.ServerHttpProtocol.handle_error = Mock()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.create_data = Mock(return_value=self.data)
- self.handler.submit_data = AsyncMock()
-
- def test_create_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- self.handler.create_data.assert_called_with(self.message, self.status)
-
- def test_submit_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- self.handler.submit_data.assert_called_with(self.data)
-
- def test_handle_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- aiohttp.server.ServerHttpProtocol.handle_error.assert_called_with(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
-
- def tearDown(self):
- shutil.rmtree(self.main_page_path)
diff --git a/snare/tests/test_handle_html_content.py b/snare/tests/test_handle_html_content.py
index 4ab3b45a..984a6b22 100644
--- a/snare/tests/test_handle_html_content.py
+++ b/snare/tests/test_handle_html_content.py
@@ -1,27 +1,17 @@
import unittest
-from unittest.mock import Mock
import asyncio
-import argparse
-import aiohttp
import shutil
import os
-import yarl
from bs4 import BeautifulSoup
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+from snare.utils.asyncmock import AsyncMock
+from snare.html_handler import HtmlHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestHandleHtmlContent(unittest.TestCase):
def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
- run_args.add_argument("--no-dorks")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
self.content = '''
@@ -34,38 +24,39 @@ def setUp(self):
self.expected_content += ' A\n \n paragraph to be tested\n\n \n\n'
self.no_dorks_content = '\n \n \n A paragraph to be tested\n'
self.no_dorks_content += '\n \n\n'
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.dir = self.main_page_path
+ self.return_content = None
+ no_dorks = True
+ tanner = "tanner.mushmush.org"
+ self.handler = HtmlHandler(no_dorks, tanner)
def test_handle_content(self):
- self.handler.run_args.no_dorks = False
+ self.handler.no_dorks = False
self.handler.get_dorks = AsyncMock(return_value=["test_dork1"])
async def test():
- self.return_content = await self.handler.handle_html_content(self.content)
+ self.return_content = await self.handler.handle_content(self.content)
self.loop.run_until_complete(test())
soup = BeautifulSoup(self.return_content, "html.parser")
- self.return_content = soup.decode("utf-8")
- self.assertEquals(self.return_content, self.expected_content)
+ return_content = soup.decode("utf-8")
+ self.assertEquals(return_content, self.expected_content)
def test_handle_content_no_dorks(self):
- self.handler.run_args.no_dorks = True
+ self.handler.no_dorks = True
async def test():
- self.return_content = await self.handler.handle_html_content(self.content)
+ self.return_content = await self.handler.handle_content(self.content)
self.loop.run_until_complete(test())
soup = BeautifulSoup(self.return_content, "html.parser")
self.return_content = soup.decode("utf-8")
self.assertEquals(self.return_content, self.no_dorks_content)
def test_handle_content_exception(self):
- self.handler.run_args.no_dorks = False
+ self.handler.no_dorks = False
self.handler.get_dorks = AsyncMock(return_value=[])
async def test():
- self.return_content = await self.handler.handle_html_content(self.content)
+ self.return_content = await self.handler.handle_content(self.content)
with self.assertRaises(IndexError):
self.loop.run_until_complete(test())
diff --git a/snare/tests/test_handle_request.py b/snare/tests/test_handle_request.py
index d8698c13..ee545ca2 100644
--- a/snare/tests/test_handle_request.py
+++ b/snare/tests/test_handle_request.py
@@ -1,33 +1,35 @@
import unittest
from unittest.mock import Mock
-from unittest.mock import call
import asyncio
import argparse
-import aiohttp
import shutil
import os
-import json
-import yarl
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import aiohttp
+from aiohttp.http_parser import RawRequestMessage
+from aiohttp import HttpVersion
+from aiohttp import web
+from yarl import URL
+from snare.server import HttpRequestHandler
+from snare.utils.asyncmock import AsyncMock
+from snare.utils.page_path_generator import generate_unique_path
class TestHandleRequest(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.server_header = "test_server"
- self.handler.run_args.slurp_enabled = True
+ args = run_args.parse_args(['--page-dir', self.page_dir])
+ uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
+ args.tanner = 'tanner.mushmush.org'
+ args.no_dorks = True
+ args.server_header = "test_server"
+ args.slurp_enabled = True
+ self.handler = HttpRequestHandler(meta, args, uuid)
self.data = {
'method': 'GET', 'path': '/',
'headers': {
@@ -37,64 +39,59 @@ def setUp(self):
'sess_uuid': 'prev_test_uuid'
}
}
+ self.loop = asyncio.new_event_loop()
self.content = ''
self.content_type = 'test_type'
- self.event_result = dict(response=dict(message=dict(detection={'type': 1}, sess_uuid="test_uuid")))
- self.request = aiohttp.protocol.RawRequestMessage(
+ event_result = dict(response=dict(message=dict(detection={'type': 1}, sess_uuid="test_uuid")))
+ RequestHandler = Mock()
+ protocol = RequestHandler()
+ message = RawRequestMessage(
method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=self.data['headers'],
- raw_headers=None, should_close=None, compression=None)
- self.handler.create_data = Mock(return_value=self.data)
- self.handler.submit_data = AsyncMock(return_value=self.event_result)
+ raw_headers=None, should_close=None, compression=None, upgrade=None, chunked=None,
+ url=URL('http://test_url/')
+ )
+ self.request = web.Request(
+ message=message, payload=None, protocol=protocol, payload_writer=None,
+ task='POST', loop=self.loop
+ )
+ self.handler.tanner_handler.create_data = Mock(return_value=self.data)
+ self.handler.tanner_handler.submit_data = AsyncMock(return_value=event_result)
self.handler.submit_slurp = AsyncMock()
- self.payload = aiohttp.streams.EmptyStreamReader()
- aiohttp.Response.add_header = Mock()
- aiohttp.Response.write = Mock()
- aiohttp.Response.send_headers = Mock()
- aiohttp.Response.write_eof = AsyncMock()
+ web.Response.add_header = Mock()
+ web.Response.write = Mock()
+ web.Response.send_headers = Mock()
+ web.Response.write_eof = AsyncMock()
aiohttp.streams.EmptyStreamReader.read = AsyncMock(return_value=b'con1=test1&con2=test2')
- self.handler.parse_tanner_response = AsyncMock(
+ self.handler.tanner_handler.parse_tanner_response = AsyncMock(
return_value=(self.content, self.content_type, self.data['headers'], self.data['headers']['status']))
def test_create_request_data(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.create_data.assert_called_with(self.request, 200)
+ self.handler.tanner_handler.create_data.assert_called_with(self.request, 200)
def test_submit_request_data(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.submit_data.assert_called_with(self.data)
+ self.handler.tanner_handler.submit_data.assert_called_with(self.data)
def test_submit_request_slurp(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
self.handler.submit_slurp.assert_called_with(self.request.path)
def test_parse_response(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
- self.loop.run_until_complete(test())
- self.handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
-
- def test_handle_response(self):
- calls = [call('Host', 'test_host'), call('status', 200), call('Server', 'test_server'),
- call('Set-Cookie', 'sess_uuid=test_uuid'), call('Content-Type', 'test_type'),
- call('Content-Length', str(len(self.content)))]
-
- async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- aiohttp.Response.add_header.assert_has_calls(calls)
- aiohttp.Response.send_headers.assert_called_with()
- aiohttp.Response.write.assert_called_with(self.content)
- aiohttp.Response.write_eof.assert_called_with()
+ self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
def tearDown(self):
shutil.rmtree(self.main_page_path)
diff --git a/snare/tests/test_parse_tanner_response.py b/snare/tests/test_parse_tanner_response.py
index 9578e563..6219526a 100644
--- a/snare/tests/test_parse_tanner_response.py
+++ b/snare/tests/test_parse_tanner_response.py
@@ -1,18 +1,12 @@
import unittest
-from unittest.mock import Mock
-from unittest.mock import call
import asyncio
import argparse
-import aiohttp
import shutil
import os
import json
-import yarl
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
-from urllib.parse import unquote
+from snare.utils.asyncmock import AsyncMock
+from snare.utils.page_path_generator import generate_unique_path
+from snare.tanner_handler import TannerHandler
class TestParseTannerResponse(unittest.TestCase):
@@ -22,25 +16,34 @@ def setUp(self):
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.meta_content = {"/index.html": {"hash": "hash_name", "content_type": "text/html"}}
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ meta_content = {"/index.html": {"hash": "hash_name", "content_type": "text/html"}}
self.page_content = ""
self.content_type = "text/html"
with open(os.path.join(self.main_page_path, "hash_name"), 'w') as f:
f.write(self.page_content)
with open(os.path.join(self.main_page_path, "meta.json"), 'w') as f:
- json.dump(self.meta_content, f)
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
+ json.dump(meta_content, f)
+ args = run_args.parse_args(['--page-dir', page_dir])
+ args.index_page = '/index.html'
+ args.no_dorks = True
+ args.tanner = "tanner.mushmush.org"
+ uuid = "test_uuid"
+ self.handler = TannerHandler(args, meta_content, uuid)
self.requested_name = '/'
- self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta_content, self.args)
- self.handler.run_args.index_page = '/index.html'
- self.handler.handle_html_content = AsyncMock(return_value=self.page_content)
+ self.loop = asyncio.get_event_loop()
+ self.handler.html_handler.handle_content = AsyncMock(return_value=self.page_content)
+ self.res1 = None
+ self.res2 = None
+ self.res3 = None
+ self.res4 = None
+ self.detection = None
+ self.expected_content = None
+ self.call_content = None
def test_parse_type_one(self):
self.detection = {"type": 1}
self.call_content = b''
- self.expected_content = self.page_content
async def test():
(self.res1, self.res2,
@@ -96,7 +99,7 @@ async def test():
(self.res1, self.res2,
self.res3, self.res4) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
self.loop.run_until_complete(test())
- self.handler.handle_html_content.assert_called_with(self.call_content)
+ self.handler.html_handler.handle_content.assert_called_with(self.call_content)
def test_parse_exception(self):
self.detection = {}
diff --git a/snare/tests/test_submit_data.py b/snare/tests/test_submit_data.py
index fa2190e4..d3e61133 100644
--- a/snare/tests/test_submit_data.py
+++ b/snare/tests/test_submit_data.py
@@ -1,27 +1,26 @@
import unittest
-from unittest.mock import Mock
import asyncio
import argparse
-import aiohttp
import shutil
import os
import json
import yarl
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import aiohttp
+from snare.utils.asyncmock import AsyncMock
+from snare.tanner_handler import TannerHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestSubmitData(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ args = run_args.parse_args(['--page-dir', page_dir])
self.loop = asyncio.new_event_loop()
self.data = {
'method': 'GET', 'path': '/',
@@ -34,10 +33,17 @@ def setUp(self):
}
}
aiohttp.ClientSession.post = AsyncMock(
- return_value=aiohttp.ClientResponse(url=yarl.URL("http://www.example.com"), method="GET")
+ return_value=aiohttp.ClientResponse(
+ url=yarl.URL("http://www.example.com"), method="GET", writer=None, continue100=1,
+ timer=None, request_info=None, traces=None, loop=self.loop,
+ session=None
+ )
)
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.tanner = "tanner.mushmush.org"
+ uuid = "test_uuid"
+ args.tanner = "tanner.mushmush.org"
+ args.no_dorks = True
+ self.handler = TannerHandler(args, meta, uuid)
+ self.result = None
def test_post_data(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={'type': 1}, sess_uuid="test_uuid"))
@@ -46,7 +52,7 @@ async def test():
self.result = await self.handler.submit_data(self.data)
self.loop.run_until_complete(test())
aiohttp.ClientSession.post.assert_called_with(
- 'http://tanner.mushmush.org:8090/event', data=json.dumps(self.data)
+ 'http://tanner.mushmush.org:8090/event', data=json.dumps(self.data), timeout=10.0
)
def test_event_result(self):
diff --git a/snare/tests/test_versions_manager.py b/snare/tests/test_versions_manager.py
index f634a227..280baf21 100644
--- a/snare/tests/test_versions_manager.py
+++ b/snare/tests/test_versions_manager.py
@@ -1,5 +1,5 @@
import unittest
-from versions_manager import VersionManager
+from snare.utils.snare_helpers import VersionManager
class TestVersion(unittest.TestCase):
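[Note] The rewritten tests lean on snare.utils.asyncmock.AsyncMock, whose source is not shown in this series. On the Python 3.5/3.6 targets of the time (before unittest.mock grew a native AsyncMock in 3.8), such a helper is typically implemented as follows — an assumption, not the file's verified contents:

    from unittest.mock import MagicMock

    class AsyncMock(MagicMock):
        # Awaitable MagicMock: awaiting the mock records the call and
        # returns the configured return_value.
        async def __call__(self, *args, **kwargs):
            return super().__call__(*args, **kwargs)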
From 14bfbe7d479ee1da74e125020bbcb3f1b7c173d2 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Wed, 11 Jul 2018 05:52:36 +0530
Subject: [PATCH 06/13] Fix attack requests from snare-develop (#149)
* phase#1
* Fix tests
---
snare/server.py | 6 +++---
snare/tanner_handler.py | 2 +-
snare/tests/test_create_data.py | 2 +-
snare/tests/test_handle_request.py | 4 ++--
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/snare/server.py b/snare/server.py
index d9cf1cab..74c9cbbc 100644
--- a/snare/server.py
+++ b/snare/server.py
@@ -37,7 +37,7 @@ async def submit_slurp(self, data):
self.logger.error('Error submitting slurp: %s', e)
async def handle_request(self, request):
- self.logger.info('Request path: {0}'.format(request.path))
+ self.logger.info('Request path: {0}'.format(request.path_qs))
data = self.tanner_handler.create_data(request, 200)
if request.method == 'POST':
post_data = await request.post()
@@ -51,10 +51,10 @@ async def handle_request(self, request):
# Log the event to slurp service if enabled
if self.run_args.slurp_enabled:
- await self.submit_slurp(request.path)
+ await self.submit_slurp(request.path_qs)
content, content_type, headers, status_code = await self.tanner_handler.parse_tanner_response(
- request.path, event_result['response']['message']['detection'])
+ request.path_qs, event_result['response']['message']['detection'])
response_headers = multidict.CIMultiDict()
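[Note] The substance of this patch is the switch from request.path to request.path_qs: path strips the query string, so injected parameters never reached TANNER. Illustrated with yarl, which backs aiohttp's URL handling:

    from yarl import URL

    u = URL('http://host/index.php?id=1%27%20OR%201=1')
    u.path      # '/index.php'                      (attack payload lost)
    u.path_qs   # '/index.php?id=1%27%20OR%201=1'   (what TANNER needs to see)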
diff --git a/snare/tanner_handler.py b/snare/tanner_handler.py
index a6f8298f..c8810612 100644
--- a/snare/tanner_handler.py
+++ b/snare/tanner_handler.py
@@ -37,7 +37,7 @@ def create_data(self, request, response_status):
header = {key: value for (key, value) in request.headers.items()}
data['method'] = request.method
data['headers'] = header
- data['path'] = request.path
+ data['path'] = request.path_qs
if ('Cookie' in header):
data['cookies'] = {
cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')
diff --git a/snare/tests/test_create_data.py b/snare/tests/test_create_data.py
index a07ab0f0..c89e1db7 100644
--- a/snare/tests/test_create_data.py
+++ b/snare/tests/test_create_data.py
@@ -45,7 +45,7 @@ def setUp(self):
self.response_status = "test_status"
self.data = None
self.expected_data = {
- 'method': 'POST', 'path': '/',
+ 'method': 'POST', 'path': 'http://test_url/',
'headers': {'Host': 'test_host', 'status': 200,
'Cookie': 'sess_uuid=prev_test_uuid; test_cookie=test'},
'uuid': '9c10172f-7ce2-4fb4-b1c6-abc70141db56',
diff --git a/snare/tests/test_handle_request.py b/snare/tests/test_handle_request.py
index ee545ca2..18f7590e 100644
--- a/snare/tests/test_handle_request.py
+++ b/snare/tests/test_handle_request.py
@@ -84,14 +84,14 @@ def test_submit_request_slurp(self):
async def test():
await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.submit_slurp.assert_called_with(self.request.path)
+ self.handler.submit_slurp.assert_called_with(self.request.path_qs)
def test_parse_response(self):
async def test():
await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
+ self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {'type': 1})
def tearDown(self):
shutil.rmtree(self.main_page_path)
From dad423243c4e0c06a2666717de282a516d325faa Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 16 Feb 2018 14:11:45 +0530
Subject: [PATCH 07/13] Custom error ("404 not found") page (#93)
error_page_implementation
---
clone.py | 12 +++++++-----
snare.py | 16 ++++++++++++----
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/clone.py b/clone.py
index 43035550..079bf04f 100644
--- a/clone.py
+++ b/clone.py
@@ -32,7 +32,7 @@
class Cloner(object):
def __init__(self, root, max_depth):
self.visited_urls = []
- self.root = self.add_scheme(root)
+ self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
self.moved_root = None
if len(self.root.host) < 4:
@@ -51,7 +51,8 @@ def add_scheme(url):
new_url = yarl.URL(url)
else:
new_url = yarl.URL('http://' + url)
- return new_url
+ err_url = yarl.URL('http://' + url + '/status_404')
+ return new_url, err_url
async def process_link(self, url, level, check_host=False):
try:
@@ -145,11 +146,11 @@ async def get_body(self, session):
content_type = None
try:
with aiohttp.Timeout(10.0):
- response = await session.get(current_url)
+ response = await session.get(current_url, headers={'Accept': 'text/html'})
content_type = response.content_type
data = await response.read()
-
- except (ValueError,aiohttp.ClientError, asyncio.TimeoutError) as client_error:
+
+ except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
print(client_error)
else:
await response.release()
@@ -187,6 +188,7 @@ async def run(self):
session = aiohttp.ClientSession()
try:
await self.new_urls.put((self.root, 0))
+ await self.new_urls.put((self.error_page,0))
await self.get_body(session)
except KeyboardInterrupt:
raise
diff --git a/snare.py b/snare.py
index 4d8a3fe1..4c45c37b 100644
--- a/snare.py
+++ b/snare.py
@@ -224,9 +224,9 @@ async def parse_tanner_response(self, requested_name, detection):
content = None
status_code = 200
headers = {}
- p = re.compile('/+')
- requested_name = p.sub('/', requested_name)
-
+ p = re.compile('/+') # Creating a regex object for the pattern of multiple contiguous forward slashes
+ requested_name = p.sub('/', requested_name) # Substituting all occurrences of the pattern with single forward slash
+
if detection['type'] == 1:
query_start = requested_name.find('?')
if query_start != -1:
@@ -236,12 +236,20 @@ async def parse_tanner_response(self, requested_name, detection):
requested_name = self.run_args.index_page
try:
if requested_name[-1] == '/':
- requested_name = requested_name[:-1]
+ requested_name = requested_name[:-1]
requested_name = unquote(requested_name)
file_name = self.meta[requested_name]['hash']
content_type = self.meta[requested_name]['content_type']
except KeyError:
status_code = 404
+ requested_name = '/status_404'
+ file_name = self.meta[requested_name]['hash']
+ content_type = 'text/html'
+ path = os.path.join(self.dir, file_name)
+ with open(path, 'rb') as fh:
+ content = fh.read()
+ content = await self.handle_html_content(content)
+
else:
path = os.path.join(self.dir, file_name)
if os.path.isfile(path):
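[Note] Condensed, the KeyError branch above answers every unknown path with the target's own 404 page, which the cloner stored under the '/status_404' key. A standalone sketch of that lookup:

    def resolve(meta, requested_name):
        # Known path: serve the cloned file. Unknown path: fall back to the
        # captured 404 page with a real 404 status.
        try:
            return meta[requested_name]['hash'], meta[requested_name]['content_type'], 200
        except KeyError:
            return meta['/status_404']['hash'], 'text/html', 404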
From 0a2a1f866e6fb6f9c2face0be0635c9db8f9c6f8 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Tue, 13 Mar 2018 03:32:06 +0530
Subject: [PATCH 08/13] Logging for snare and cloner (#109)
* Creating logger
* Remove conf.py
* Adding clone error logger
* Changes
* Removing level as we have both .log and .err
* Adding quit info
* Removing extra slash
* Requested changes
* minor change
---
clone.py | 26 ++++++++++++++++++++++----
logger.py | 49 +++++++++++++++++++++++++++++++++++++++++++++++++
snare.py | 26 +++++++++++++++++++-------
3 files changed, 90 insertions(+), 11 deletions(-)
create mode 100644 logger.py
diff --git a/clone.py b/clone.py
index 079bf04f..121f6f43 100644
--- a/clone.py
+++ b/clone.py
@@ -27,6 +27,8 @@
import cssutils
import yarl
from bs4 import BeautifulSoup
+import logger
+import logging
class Cloner(object):
@@ -41,9 +43,10 @@ def __init__(self, root, max_depth):
if not os.path.exists(self.target_path):
os.mkdir(self.target_path)
-
+
self.new_urls = Queue()
self.meta = {}
+ self.logger = logging.getLogger(__name__)
@staticmethod
def add_scheme(url):
@@ -82,7 +85,7 @@ async def process_link(self, url, level, check_host=False):
try:
res = url.relative().human_repr()
except ValueError:
- print(url)
+ self.logger.error(url)
return res
async def replace_links(self, data, level):
@@ -151,7 +154,7 @@ async def get_body(self, session):
data = await response.read()
except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
- print(client_error)
+ self.logger.error(client_error)
else:
await response.release()
if data is not None:
@@ -181,7 +184,7 @@ async def get_root_host(self):
self.moved_root = resp._url_obj
resp.close()
except aiohttp.errors.ClientError as err:
- print("Can\'t connect to target host.")
+ self.logger.error("Can\'t connect to target host.")
exit(-1)
async def run(self):
@@ -210,7 +213,14 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument("--target", help="domain of the site to be cloned", required=True)
parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
+ parser.add_argument("--log_path", help="path to the error log file")
args = parser.parse_args()
+ if args.log_path:
+ log_err = args.log_path + "clone.err"
+ else:
+ log_err = "/opt/snare/clone.err"
+ logger.Logger.create_clone_logger(log_err, __package__)
+ print("Error logs will be stored in {}\n".format(log_err))
try:
cloner = Cloner(args.target, int(args.max_depth))
loop.run_until_complete(cloner.get_root_host())
@@ -220,4 +230,12 @@ def main():
if __name__ == '__main__':
+ print("""
+ ______ __ ______ _ ____________
+ / ____// / / __ // | / / ____/ __ \\
+ / / / / / / / // |/ / __/ / /_/ /
+ / /___ / /____ / /_/ // /| / /___/ _, _/
+/_____//______//_____//_/ |_/_____/_/ |_|
+
+ """)
main()
diff --git a/logger.py b/logger.py
new file mode 100644
index 00000000..e0b146e1
--- /dev/null
+++ b/logger.py
@@ -0,0 +1,49 @@
+import logging
+import logging.handlers
+
+class LevelFilter(logging.Filter):
+ """Filters (lets through) all messages with level < LEVEL"""
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+
+class Logger:
+
+ @staticmethod
+ def create_logger(debug_filename, err_filename, logger_name):
+ logger = logging.getLogger(logger_name)
+ logger.setLevel(logging.DEBUG)
+ logger.propagate = False
+ formatter = logging.Formatter(
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+
+ # ERROR log to 'snare.err'
+ error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
+ error_log_handler.setLevel(logging.ERROR)
+ error_log_handler.setFormatter(formatter)
+ logger.addHandler(error_log_handler)
+
+ # DEBUG log to 'snare.log'
+ debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding='utf-8')
+ debug_log_handler.setLevel(logging.DEBUG)
+ debug_log_handler.setFormatter(formatter)
+ max_level_filter = LevelFilter(logging.ERROR)
+ debug_log_handler.addFilter(max_level_filter)
+ logger.addHandler(debug_log_handler)
+
+ return logger
+
+ @staticmethod
+ def create_clone_logger(err_filename, logger_name):
+ logger = logging.getLogger(logger_name)
+ formatter = logging.Formatter(
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ # ERROR log to 'clone.err'
+ error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
+ error_log_handler.setLevel(logging.ERROR)
+ error_log_handler.setFormatter(formatter)
+ logger.addHandler(error_log_handler)
+
\ No newline at end of file
diff --git a/snare.py b/snare.py
index 4c45c37b..af3c9639 100644
--- a/snare.py
+++ b/snare.py
@@ -33,6 +33,8 @@
import pip
from aiohttp import MultiDict
import re
+import logging
+import logger
try:
from aiohttp.web import StaticResource as StaticRoute
@@ -53,6 +55,8 @@ def __init__(self, meta, run_args, debug=False, keep_alive=75, **kwargs):
self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
self.meta = meta
+
+ self.logger = logging.getLogger(__name__)
self.sroute = StaticRoute(
name=None, prefix='/',
@@ -71,11 +75,11 @@ async def get_dorks(self):
try:
dorks = await r.json()
except json.decoder.JSONDecodeError as e:
- print(e)
+ self.logger.error('Error getting dorks: %s', e)
finally:
await r.release()
except asyncio.TimeoutError:
- print('Dorks timeout')
+ self.logger.info('Dorks timeout')
return dorks['response']['dorks'] if dorks else []
async def submit_slurp(self, data):
@@ -90,7 +94,7 @@ async def submit_slurp(self, data):
assert r.status == 200
r.close()
except Exception as e:
- print(e)
+ self.logger.error('Error submitting slurp: %s', e)
def create_data(self, request, response_status):
data = dict(
@@ -127,7 +131,7 @@ async def submit_data(self, data):
try:
event_result = await r.json()
except json.decoder.JSONDecodeError as e:
- print(e, data)
+ self.logger.error('Error submitting data: {} {}'.format(e, data))
finally:
await r.release()
except Exception as e:
@@ -171,14 +175,14 @@ async def handle_html_content(self, content):
return content
async def handle_request(self, request, payload):
- print('Request path: {0}'.format(request.path))
+ self.logger.info('Request path: {0}'.format(request.path))
data = self.create_data(request, 200)
if request.method == 'POST':
post_data = await payload.read()
post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
- print('POST data:')
+ self.logger.info('POST data:')
for key, val in post_data.items():
- print('\t- {0}: {1}'.format(key, val))
+ self.logger.info('\t- {0}: {1}'.format(key, val))
data['post_data'] = dict(post_data)
# Submit the event to the TANNER service
@@ -460,12 +464,17 @@ async def check_tanner():
parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
parser.add_argument("--no-dorks", help="disable the use of dorks", action='store_true')
+ parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
args = parser.parse_args()
base_path = '/opt/snare/'
base_page_path = '/opt/snare/pages/'
config = configparser.ConfigParser()
config.read(os.path.join(base_path, args.config))
+
+ log_debug = args.log_dir + "snare.log"
+ log_err = args.log_dir + "snare.err"
+ logger.Logger.create_logger(log_debug, log_err, __package__)
if args.list_pages:
print('Available pages:\n')
@@ -511,6 +520,9 @@ async def check_tanner():
drop_privileges()
print('serving on {0} with uuid {1}'.format(srv.sockets[0].getsockname()[:2], snare_uuid.decode('utf-8')))
+ print("Debug logs will be stored in", log_debug)
+ print("Error logs will be stored in", log_err)
+ print("(Press CTRL+C to quit)")
try:
loop.run_forever()
except (KeyboardInterrupt, TypeError) as e:
From ae58eb7d6ee6fd403931bd274acb27c7a043649e Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 13 Apr 2018 00:27:20 +0530
Subject: [PATCH 09/13] Add new parameter css-validate to cloner (#120)
* Adding Cloner parameter for css validation
* minor change
* Suggested changes
---
clone.py | 24 ++++++++++++++++--------
1 file changed, 16 insertions(+), 8 deletions(-)
diff --git a/clone.py b/clone.py
index 121f6f43..c7b1e0fc 100644
--- a/clone.py
+++ b/clone.py
@@ -32,7 +32,7 @@
class Cloner(object):
- def __init__(self, root, max_depth):
+ def __init__(self, root, max_depth, css_validate):
self.visited_urls = []
self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
@@ -42,8 +42,8 @@ def __init__(self, root, max_depth):
self.target_path = '/opt/snare/pages/{}'.format(self.root.host)
if not os.path.exists(self.target_path):
- os.mkdir(self.target_path)
-
+ os.mkdir(self.target_path)
+ self.css_validate = css_validate
self.new_urls = Queue()
self.meta = {}
self.logger = logging.getLogger(__name__)
@@ -165,8 +165,8 @@ async def get_body(self, session):
data = str(soup).encode()
with open(os.path.join(self.target_path, hash_name), 'wb') as index_fh:
index_fh.write(data)
- if content_type == 'text/css':
- css = cssutils.parseString(data)
+ if content_type == 'text/css':
+ css = cssutils.parseString(data, validate=self.css_validate)
for carved_url in cssutils.getUrls(css):
if carved_url.startswith('data'):
continue
@@ -199,7 +199,14 @@ async def run(self):
with open(os.path.join(self.target_path, 'meta.json'), 'w') as mj:
json.dump(self.meta, mj)
await session.close()
-
+
+def str_to_bool(v):
+ if v.lower() == 'true':
+ return True
+ elif v.lower() == 'false':
+ return False
+ else:
+ raise argparse.ArgumentTypeError('Boolean value expected')
def main():
if os.getuid() != 0:
@@ -214,15 +221,16 @@ def main():
parser.add_argument("--target", help="domain of the site to be cloned", required=True)
parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
parser.add_argument("--log_path", help="path to the error log file")
+ parser.add_argument("--css-validate", help="set whether css validation is required", type=str_to_bool, default=None)
args = parser.parse_args()
if args.log_path:
log_err = args.log_path + "clone.err"
else:
- log_err = "/opt/snare/clone.err"
+ log_err = "/opt/snare/clone.err"
logger.Logger.create_clone_logger(log_err, __package__)
print("Error logs will be stored in {}\n".format(log_err))
try:
- cloner = Cloner(args.target, int(args.max_depth))
+ cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
loop.run_until_complete(cloner.get_root_host())
loop.run_until_complete(cloner.run())
except KeyboardInterrupt:
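[Note] The new flag is handed straight through as cssutils.parseString(data, validate=self.css_validate); validate=False skips checking declarations against cssutils' CSS profiles, which silences its often noisy validation log during cloning. A small sketch:

    import cssutils

    cssutils.parseString('a { colr: red }', validate=True)   # reported via cssutils' log
    cssutils.parseString('a { colr: red }', validate=False)  # parsed without profile checks
    # --css-validate defaults to None, i.e. cssutils' own default behaviour.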
From 9875203b759c965367a55f8d01319b20b075efee Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Fri, 1 Jun 2018 16:18:25 +0530
Subject: [PATCH 10/13] Re-write cloner and move to setup structure (#141)
* Re-writing snare with latest web server
* Pep8 formatting
* Breaking server.py
* Break snare.py
* Remove duplicates
* Add port to server
* Breaking server.py
* Break snare.py
* Remove duplicates
* Improve project structure
* Suggested changes
* pep8 formatting
* Suggested changes
* rewrite and add clone to setup structure
* Formatting
* Formatting
* Pep8 formatting
---
{tests => bin}/__init__.py | 0
bin/clone | 65 +++
bin/snare | 209 +++++++
converter.py | 31 -
setup.py | 13 +
snare.py | 535 ------------------
snare/__init__.py | 0
clone.py => snare/cloner.py | 102 +---
snare/html_handler.py | 63 +++
snare/middlewares.py | 38 ++
snare/server.py | 93 +++
snare/tanner_handler.py | 126 +++++
snare/tests/__init__.py | 0
{tests => snare/tests}/test_add_meta_tag.py | 0
{tests => snare/tests}/test_converter.py | 0
{tests => snare/tests}/test_create_data.py | 0
{tests => snare/tests}/test_get_dorks.py | 0
{tests => snare/tests}/test_handle_error.py | 0
.../tests}/test_handle_html_content.py | 0
{tests => snare/tests}/test_handle_request.py | 0
.../tests}/test_parse_tanner_response.py | 0
{tests => snare/tests}/test_submit_data.py | 0
.../tests}/test_versions_manager.py | 0
snare/utils/__init__.py | 0
{utils => snare/utils}/asyncmock.py | 0
logger.py => snare/utils/logger.py | 22 +-
{utils => snare/utils}/page_path_generator.py | 0
snare/utils/snare_helpers.py | 105 ++++
versions_manager.py | 17 -
29 files changed, 742 insertions(+), 677 deletions(-)
rename {tests => bin}/__init__.py (100%)
create mode 100644 bin/clone
create mode 100644 bin/snare
delete mode 100644 converter.py
create mode 100644 setup.py
delete mode 100644 snare.py
create mode 100644 snare/__init__.py
rename clone.py => snare/cloner.py (66%)
create mode 100644 snare/html_handler.py
create mode 100644 snare/middlewares.py
create mode 100644 snare/server.py
create mode 100644 snare/tanner_handler.py
create mode 100644 snare/tests/__init__.py
rename {tests => snare/tests}/test_add_meta_tag.py (100%)
rename {tests => snare/tests}/test_converter.py (100%)
rename {tests => snare/tests}/test_create_data.py (100%)
rename {tests => snare/tests}/test_get_dorks.py (100%)
rename {tests => snare/tests}/test_handle_error.py (100%)
rename {tests => snare/tests}/test_handle_html_content.py (100%)
rename {tests => snare/tests}/test_handle_request.py (100%)
rename {tests => snare/tests}/test_parse_tanner_response.py (100%)
rename {tests => snare/tests}/test_submit_data.py (100%)
rename {tests => snare/tests}/test_versions_manager.py (100%)
create mode 100644 snare/utils/__init__.py
rename {utils => snare/utils}/asyncmock.py (100%)
rename logger.py => snare/utils/logger.py (81%)
rename {utils => snare/utils}/page_path_generator.py (100%)
create mode 100644 snare/utils/snare_helpers.py
delete mode 100644 versions_manager.py
diff --git a/tests/__init__.py b/bin/__init__.py
similarity index 100%
rename from tests/__init__.py
rename to bin/__init__.py
diff --git a/bin/clone b/bin/clone
new file mode 100644
index 00000000..a890bbb3
--- /dev/null
+++ b/bin/clone
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+
+"""
+Copyright (C) 2015-2016 MushMush Foundation
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+"""
+
+import argparse
+import asyncio
+import os
+import sys
+from snare.utils import logger
+from snare.cloner import Cloner
+from snare.utils.snare_helpers import str_to_bool
+
+def main():
+ if os.getuid() != 0:
+ print('Clone has to be run as root!')
+ sys.exit(1)
+ if not os.path.exists('/opt/snare'):
+ os.mkdir('/opt/snare')
+ if not os.path.exists('/opt/snare/pages'):
+ os.mkdir('/opt/snare/pages')
+ loop = asyncio.get_event_loop()
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--target", help="domain of the site to be cloned", required=True)
+ parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
+ parser.add_argument("--log_path", help="path to the error log file")
+ parser.add_argument(
+ "--css-validate", help="set whether css validation is required", type=str_to_bool, default=None
+ )
+ args = parser.parse_args()
+ if args.log_path:
+ log_err = args.log_path + "clone.err"
+ else:
+ log_err = "/opt/snare/clone.err"
+ logger.Logger.create_clone_logger(log_err, __package__)
+ print("Error logs will be stored in {}\n".format(log_err))
+ try:
+ cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
+ loop.run_until_complete(cloner.get_root_host())
+ loop.run_until_complete(cloner.run())
+ except KeyboardInterrupt:
+ pass
+
+
+if __name__ == '__main__':
+ print("""
+ ______ __ ______ _ ____________
+ / ____// / / __ // | / / ____/ __ \\
+ / / / / / / / // |/ / __/ / /_/ /
+ / /___ / /____ / /_/ // /| / /___/ _, _/
+/_____//______//_____//_/ |_/_____/_/ |_|
+
+ """)
+ main()
diff --git a/bin/snare b/bin/snare
new file mode 100644
index 00000000..058acbe7
--- /dev/null
+++ b/bin/snare
@@ -0,0 +1,209 @@
+#!/usr/bin/python3
+
+"""
+Copyright (C) 2015-2016 MushMush Foundation
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+"""
+import argparse
+import asyncio
+import pwd
+import grp
+import configparser
+import json
+import multiprocessing
+import os
+import sys
+import time
+import uuid
+from concurrent.futures import ProcessPoolExecutor
+import aiohttp
+import git
+import pip
+import netifaces as ni
+from snare.server import HttpRequestHandler
+from snare.utils.logger import Logger
+from snare.utils import snare_helpers
+from snare.utils.snare_helpers import str_to_bool
+
+
+def create_initial_config():
+ cfg = configparser.ConfigParser()
+ cfg['WEB-TOOLS'] = dict(google='', bing='')
+ with open('/opt/snare/snare.cfg', 'w') as configfile:
+ cfg.write(configfile)
+
+
+def snare_setup():
+ if os.getuid() != 0:
+ print('Snare has to be started as root!')
+ sys.exit(1)
+ # Create folders
+ if not os.path.exists('/opt/snare'):
+ os.mkdir('/opt/snare')
+ if not os.path.exists('/opt/snare/pages'):
+ os.mkdir('/opt/snare/pages')
+ # Write pid to pid file
+ with open('/opt/snare/snare.pid', 'wb') as pid_fh:
+ pid_fh.write(str(os.getpid()).encode('utf-8'))
+ # Config file
+ if not os.path.exists('/opt/snare/snare.cfg'):
+ create_initial_config()
+ # Read or create the sensor id
+ uuid_file_path = '/opt/snare/snare.uuid'
+ if os.path.exists(uuid_file_path):
+ with open(uuid_file_path, 'rb') as uuid_fh:
+ snare_uuid = uuid_fh.read()
+ return snare_uuid
+ else:
+ with open(uuid_file_path, 'wb') as uuid_fh:
+ snare_uuid = str(uuid.uuid4()).encode('utf-8')
+ uuid_fh.write(snare_uuid)
+ return snare_uuid
+
+
+def drop_privileges():
+ uid_name = 'nobody'
+ wanted_user = pwd.getpwnam(uid_name)
+ gid_name = grp.getgrgid(wanted_user.pw_gid).gr_name
+ wanted_group = grp.getgrnam(gid_name)
+ os.setgid(wanted_group.gr_gid)
+ os.setuid(wanted_user.pw_uid)
+ new_user = pwd.getpwuid(os.getuid())
+ new_group = grp.getgrgid(os.getgid())
+ print('privileges dropped, running as "{}:{}"'.format(new_user.pw_name, new_group.gr_name))
+
+
+def compare_version_info(timeout):
+ while True:
+ repo = git.Repo(os.getcwd())
+ try:
+ rem = repo.remote()
+ res = rem.fetch()
+ diff_list = res[0].commit.diff(repo.heads.master)
+ except TimeoutError:
+ print('timeout fetching the repository version')
+ else:
+ if diff_list:
+ print('you are running an outdated version, SNARE will be updated and restarted')
+ repo.git.reset('--hard')
+ repo.heads.master.checkout()
+ repo.git.clean('-xdf')
+ repo.remotes.origin.pull()
+ pip.main(['install', '-r', 'requirements.txt'])
+ os.execv(sys.executable, [sys.executable, __file__] + sys.argv[1:])
+ return
+ else:
+ print('you are running the latest version')
+ time.sleep(timeout)
+
+
+async def check_tanner():
+ vm = snare_helpers.VersionManager()
+ async with aiohttp.ClientSession() as client:
+ req_url = 'http://{}:8090/version'.format(args.tanner)
+ try:
+ resp = await client.get(req_url)
+ result = await resp.json()
+ version = result["version"]
+ vm.check_compatibility(version)
+ except aiohttp.ClientOSError:
+ print("Can't connect to tanner host {}".format(req_url))
+ exit(1)
+ else:
+ await resp.release()
+
+if __name__ == '__main__':
+ print(r"""
+ _____ _ _____ ____ ______
+ / ___// | / / | / __ \/ ____/
+ \__ \/ |/ / /| | / /_/ / __/
+ ___/ / /| / ___ |/ _, _/ /___
+/____/_/ |_/_/ |_/_/ |_/_____/
+
+ """)
+ parser = argparse.ArgumentParser()
+ page_group = parser.add_mutually_exclusive_group(required=True)
+ page_group.add_argument("--page-dir", help="name of the folder to be served")
+ page_group.add_argument("--list-pages", help="list available pages", action='store_true')
+ parser.add_argument("--index-page", help="file name of the index page", default='index.html')
+ parser.add_argument("--port", help="port to listen on", default='8080')
+ parser.add_argument("--interface", help="interface to bind to")
+ parser.add_argument("--host-ip", help="host ip to bind to", default='localhost')
+ parser.add_argument("--debug", help="run web server in debug mode", default=False)
+ parser.add_argument("--tanner", help="ip of the tanner service", default='tanner.mushmush.org')
+ parser.add_argument("--skip-check-version", help="skip check for update", action='store_true')
+ parser.add_argument("--slurp-enabled", help="enable nsq logging", action='store_true')
+ parser.add_argument("--slurp-host", help="nsq logging host", default='slurp.mushmush.org')
+ parser.add_argument("--slurp-auth", help="nsq logging auth", default='slurp')
+ parser.add_argument("--config", help="snare config file", default='snare.cfg')
+ parser.add_argument("--auto-update", help="auto update SNARE if new version available ", default=True)
+ parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
+ parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
+ parser.add_argument("--no-dorks", help="disable the use of dorks", type=str_to_bool, default=True)
+ parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
+ args = parser.parse_args()
+ base_path = '/opt/snare/'
+ base_page_path = '/opt/snare/pages/'
+ snare_uuid = snare_setup()
+ config = configparser.ConfigParser()
+ config.read(os.path.join(base_path, args.config))
+ log_debug = args.log_dir + "snare.log"
+ log_err = args.log_dir + "snare.err"
+ Logger.create_logger(log_debug, log_err, __package__)
+ if args.list_pages:
+ print('Available pages:\n')
+ for page in os.listdir(base_page_path):
+ print('\t- {}'.format(page))
+ print('\nuse with --page-dir {page_name}\n\n')
+ exit()
+ full_page_path = os.path.join(base_page_path, args.page_dir)
+ if not os.path.exists(full_page_path):
+ print("--page-dir: {0} does not exist".format(args.page_dir))
+ exit()
+ args.index_page = os.path.join("/", args.index_page)
+
+ if not os.path.exists(os.path.join(full_page_path, 'meta.json')):
+ conv = snare_helpers.Converter()
+ conv.convert(full_page_path)
+ print("pages was converted. Try to clone again for the better result.")
+
+ with open(os.path.join(full_page_path, 'meta.json')) as meta:
+ meta_info = json.load(meta)
+ if not os.path.exists(os.path.join(base_page_path, args.page_dir,
+ os.path.join(meta_info[args.index_page]['hash']))):
+ print('can\'t create meta tag')
+ else:
+ snare_helpers.add_meta_tag(args.page_dir, meta_info[args.index_page]['hash'], config)
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(check_tanner())
+
+ pool = ProcessPoolExecutor(max_workers=multiprocessing.cpu_count())
+ compare_version_fut = None
+ if args.auto_update is True:
+ timeout = snare_helpers.parse_timeout(args.update_timeout)
+ compare_version_fut = loop.run_in_executor(pool, compare_version_info, timeout)
+
+ if args.host_ip == 'localhost' and args.interface:
+ args.host_ip = ni.ifaddresses(args.interface)[2][0]['addr']
+
+ app = HttpRequestHandler(meta_info, args, snare_uuid, debug=args.debug, keep_alive=75)
+ drop_privileges()
+ print('serving with uuid {0}'.format(snare_uuid.decode('utf-8')))
+ print("Debug logs will be stored in", log_debug)
+ print("Error logs will be stored in", log_err)
+ try:
+ app.start()
+ except (KeyboardInterrupt, TypeError) as e:
+ print(e)
+ finally:
+ if compare_version_fut:
+ compare_version_fut.cancel()
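The sensor id handling in snare_setup() is the part worth internalising: the uuid is generated once and then reused on every start, so events stay attributable to the same sensor. A standalone sketch of just that read-or-create logic, with a hypothetical path so it runs without root (the real file is /opt/snare/snare.uuid):

    import os
    import uuid

    def read_or_create_uuid(path='/tmp/snare.uuid'):  # hypothetical location for the demo
        if os.path.exists(path):
            with open(path, 'rb') as uuid_fh:
                return uuid_fh.read()
        snare_uuid = str(uuid.uuid4()).encode('utf-8')
        with open(path, 'wb') as uuid_fh:
            uuid_fh.write(snare_uuid)
        return snare_uuid

    print(read_or_create_uuid())  # same bytes on every subsequent run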
diff --git a/converter.py b/converter.py
deleted file mode 100644
index 19525549..00000000
--- a/converter.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-import hashlib
-from os import walk
-import mimetypes
-import json
-import shutil
-
-
-class Converter:
- def __init__(self):
- self.meta = {}
-
- def convert(self, path):
- files_to_convert = []
-
- for (dirpath, dirnames, filenames) in walk(path):
- for fn in filenames:
- files_to_convert.append(os.path.join(dirpath, fn))
-
- for fn in files_to_convert:
- path_len = len(path)
- file_name = fn[path_len:]
- m = hashlib.md5()
- m.update(fn.encode('utf-8'))
- hash_name = m.hexdigest()
- self.meta[file_name] = {'hash': hash_name, 'content_type': mimetypes.guess_type(file_name)[0]}
- shutil.copyfile(fn, os.path.join(path, hash_name))
- os.remove(fn)
-
- with open(os.path.join(path, 'meta.json'), 'w') as mj:
- json.dump(self.meta, mj)
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..4f7b0f02
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+from setuptools import find_packages
+from distutils.core import setup
+
+setup(name='Snare',
+ version='0.2.0',
+ description='Super Next generation Advanced Reactive honEypot',
+ author='MushMush Foundation',
+ author_email='glastopf@public.honeynet.org',
+ url='https://github.com/mushorg/snare',
+ packages=find_packages(exclude=['*.pyc']),
+ scripts=['bin/snare', 'bin/clone'],
+ )
diff --git a/snare.py b/snare.py
deleted file mode 100644
index af3c9639..00000000
--- a/snare.py
+++ /dev/null
@@ -1,535 +0,0 @@
-#!/usr/bin/python3
-
-"""
-Copyright (C) 2015-2016 MushMush Foundation
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-"""
-import argparse
-import asyncio
-import configparser
-import grp
-import json
-import mimetypes
-import multiprocessing
-import os
-import pwd
-import sys
-import time
-import uuid
-from concurrent.futures import ProcessPoolExecutor
-from urllib.parse import urlparse, unquote, parse_qsl
-from versions_manager import VersionManager
-import aiohttp
-import git
-import pip
-from aiohttp import MultiDict
-import re
-import logging
-import logger
-
-try:
- from aiohttp.web import StaticResource as StaticRoute
-except ImportError:
- from aiohttp.web import StaticResource
-
-from bs4 import BeautifulSoup
-import cssutils
-import netifaces as ni
-from converter import Converter
-
-
-class HttpRequestHandler(aiohttp.server.ServerHttpProtocol):
- def __init__(self, meta, run_args, debug=False, keep_alive=75, **kwargs):
- self.dorks = []
-
- self.run_args = run_args
- self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
-
- self.meta = meta
-
- self.logger = logging.getLogger(__name__)
-
- self.sroute = StaticRoute(
- name=None, prefix='/',
- directory=self.dir
- )
- super().__init__(debug=debug, keep_alive=keep_alive, access_log=None, **kwargs)
-
- async def get_dorks(self):
- dorks = None
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession() as session:
- r = await session.get(
- 'http://{0}:8090/dorks'.format(self.run_args.tanner)
- )
- try:
- dorks = await r.json()
- except json.decoder.JSONDecodeError as e:
- self.logger.error('Error getting dorks: %s', e)
- finally:
- await r.release()
- except asyncio.TimeoutError:
- self.logger.info('Dorks timeout')
- return dorks['response']['dorks'] if dorks else []
-
- async def submit_slurp(self, data):
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
- r = await session.post(
- 'https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}'.format(
- self.run_args.slurp_host, self.run_args.slurp_auth, data
- ), data=json.dumps(data)
- )
- assert r.status == 200
- r.close()
- except Exception as e:
- self.logger.error('Error submitting slurp: %s', e)
-
- def create_data(self, request, response_status):
- data = dict(
- method=None,
- path=None,
- headers=None,
- uuid=snare_uuid.decode('utf-8'),
- peer=None,
- status=response_status
- )
- if self.transport:
- peer = dict(
- ip=self.transport.get_extra_info('peername')[0],
- port=self.transport.get_extra_info('peername')[1]
- )
- data['peer'] = peer
- if request:
- header = {key: value for (key, value) in request.headers.items()}
- data['method'] = request.method
- data['headers'] = header
- data['path'] = request.path
- if ('Cookie' in header):
- data['cookies'] = {cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')}
- return data
-
- async def submit_data(self, data):
- event_result = None
- try:
- with aiohttp.Timeout(10.0):
- with aiohttp.ClientSession() as session:
- r = await session.post(
- 'http://{0}:8090/event'.format(self.run_args.tanner), data=json.dumps(data)
- )
- try:
- event_result = await r.json()
- except json.decoder.JSONDecodeError as e:
- self.logger.error('Error submitting data: {} {}'.format(e, data))
- finally:
- await r.release()
- except Exception as e:
- raise e
- return event_result
-
- async def handle_html_content(self, content):
- soup = BeautifulSoup(content, 'html.parser')
- if self.run_args.no_dorks is not True:
- for p_elem in soup.find_all('p'):
- if p_elem.findChildren():
- continue
- css = None
- if 'style' in p_elem.attrs:
- css = cssutils.parseStyle(p_elem.attrs['style'])
- text_list = p_elem.text.split()
- p_new = soup.new_tag('p', style=css.cssText if css else None)
- for idx, word in enumerate(text_list):
- # Fetch dorks if required
- if len(self.dorks) <= 0:
- self.dorks = await self.get_dorks()
-
- word += ' '
- if idx % 5 == 0:
- try:
- a_tag = soup.new_tag(
- 'a',
- href=self.dorks.pop(),
- style='color:{color};text-decoration:none;cursor:text;'.format(
- color=css.color if css and 'color' in css.keys() else '#000000'
- )
- )
- except IndexError:
- continue
- a_tag.string = word
- p_new.append(a_tag)
- else:
- p_new.append(soup.new_string(word))
- p_elem.replace_with(p_new)
- content = soup.encode('utf-8')
- return content
-
- async def handle_request(self, request, payload):
- self.logger.info('Request path: {0}'.format(request.path))
- data = self.create_data(request, 200)
- if request.method == 'POST':
- post_data = await payload.read()
- post_data = MultiDict(parse_qsl(post_data.decode('utf-8')))
- self.logger.info('POST data:')
- for key, val in post_data.items():
- self.logger.info('\t- {0}: {1}'.format(key, val))
- data['post_data'] = dict(post_data)
-
- # Submit the event to the TANNER service
- event_result = await self.submit_data(data)
-
- # Log the event to slurp service if enabled
- if self.run_args.slurp_enabled:
- await self.submit_slurp(request.path)
-
- content, content_type, headers, status_code = await self.parse_tanner_response(
- request.path, event_result['response']['message']['detection'])
- response = aiohttp.Response(
- self.writer, status=status_code, http_version=request.version
- )
- for name, val in headers.items():
- response.add_header(name, val)
-
- response.add_header('Server', self.run_args.server_header)
-
- if 'cookies' in data and 'sess_uuid' in data['cookies']:
- previous_sess_uuid = data['cookies']['sess_uuid']
- else:
- previous_sess_uuid = None
-
- if event_result is not None and ('sess_uuid' in event_result['response']['message']):
- cur_sess_id = event_result['response']['message']['sess_uuid']
- if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id:
- response.add_header('Set-Cookie', 'sess_uuid=' + cur_sess_id)
-
- if not content_type:
- response.add_header('Content-Type', 'text/plain')
- else:
- response.add_header('Content-Type', content_type)
- if content:
- response.add_header('Content-Length', str(len(content)))
- response.send_headers()
- if content:
- response.write(content)
- await response.write_eof()
-
- async def parse_tanner_response(self, requested_name, detection):
- content_type = None
- content = None
- status_code = 200
- headers = {}
- p = re.compile('/+') # Creating a regex object for the pattern of multiple contiguous forward slashes
- requested_name = p.sub('/', requested_name) # Substituting all occurrences of the pattern with single forward slash
-
- if detection['type'] == 1:
- query_start = requested_name.find('?')
- if query_start != -1:
- requested_name = requested_name[:query_start]
-
- if requested_name == '/':
- requested_name = self.run_args.index_page
- try:
- if requested_name[-1] == '/':
- requested_name = requested_name[:-1]
- requested_name = unquote(requested_name)
- file_name = self.meta[requested_name]['hash']
- content_type = self.meta[requested_name]['content_type']
- except KeyError:
- status_code = 404
- requested_name = '/status_404'
- file_name = self.meta[requested_name]['hash']
- content_type = 'text/html'
- path = os.path.join(self.dir, file_name)
- with open(path, 'rb') as fh:
- content = fh.read()
- content = await self.handle_html_content(content)
-
- else:
- path = os.path.join(self.dir, file_name)
- if os.path.isfile(path):
- with open(path, 'rb') as fh:
- content = fh.read()
- if content_type:
- if 'text/html' in content_type:
- content = await self.handle_html_content(content)
-
- elif detection['type'] == 2:
- payload_content = detection['payload']
- if payload_content['page']:
- try:
- file_name = self.meta[payload_content['page']]['hash']
- content_type = self.meta[payload_content['page']]['content_type']
- page_path = os.path.join(self.dir, file_name)
- with open(page_path, encoding='utf-8') as p:
- content = p.read()
- except KeyError:
- content = ''
- content_type = r'text\html'
-
- soup = BeautifulSoup(content, 'html.parser')
- script_tag = soup.new_tag('div')
- script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
- soup.body.append(script_tag)
- content = str(soup).encode()
- else:
- content_type = mimetypes.guess_type(payload_content['value'])[0]
- content = payload_content['value'].encode('utf-8')
-
- if 'headers' in payload_content:
- headers = payload_content['headers']
- else:
- payload_content = detection['payload']
- status_code = payload_content['status_code']
-
- return (content, content_type, headers, status_code)
-
- async def handle_error(self, status=500, message=None,
- payload=None, exc=None, headers=None, reason=None):
-
- data = self.create_data(message, status)
- data['error'] = exc
- await self.submit_data(data)
- super().handle_error(status, message, payload, exc, headers, reason)
-
-
-def create_initial_config():
- cfg = configparser.ConfigParser()
- cfg['WEB-TOOLS'] = dict(google='', bing='')
- with open('/opt/snare/snare.cfg', 'w') as configfile:
- cfg.write(configfile)
-
-
-def snare_setup():
- if os.getuid() != 0:
- print('Snare has to be started as root!')
- sys.exit(1)
- # Create folders
- if not os.path.exists('/opt/snare'):
- os.mkdir('/opt/snare')
- if not os.path.exists('/opt/snare/pages'):
- os.mkdir('/opt/snare/pages')
- # Write pid to pid file
- with open('/opt/snare/snare.pid', 'wb') as pid_fh:
- pid_fh.write(str(os.getpid()).encode('utf-8'))
- # Config file
- if not os.path.exists('/opt/snare/snare.cfg'):
- create_initial_config()
- # Read or create the sensor id
- uuid_file_path = '/opt/snare/snare.uuid'
- if os.path.exists(uuid_file_path):
- with open(uuid_file_path, 'rb') as uuid_fh:
- snare_uuid = uuid_fh.read()
- return snare_uuid
- else:
- with open(uuid_file_path, 'wb') as uuid_fh:
- snare_uuid = str(uuid.uuid4()).encode('utf-8')
- uuid_fh.write(snare_uuid)
- return snare_uuid
-
-
-def drop_privileges():
- uid_name = 'nobody'
- wanted_user = pwd.getpwnam(uid_name)
- gid_name = grp.getgrgid(wanted_user.pw_gid).gr_name
- wanted_group = grp.getgrnam(gid_name)
- os.setgid(wanted_group.gr_gid)
- os.setuid(wanted_user.pw_uid)
- new_user = pwd.getpwuid(os.getuid())
- new_group = grp.getgrgid(os.getgid())
- print('privileges dropped, running as "{}:{}"'.format(new_user.pw_name, new_group.gr_name))
-
-
-def add_meta_tag(page_dir, index_page):
- google_content = config['WEB-TOOLS']['google']
- bing_content = config['WEB-TOOLS']['bing']
-
- if not google_content and not bing_content:
- return
-
- main_page_path = os.path.join('/opt/snare/pages/', page_dir, index_page)
- with open(main_page_path) as main:
- main_page = main.read()
- soup = BeautifulSoup(main_page, 'html.parser')
-
- if (google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None):
- google_meta = soup.new_tag('meta')
- google_meta.attrs['name'] = 'google-site-verification'
- google_meta.attrs['content'] = google_content
- soup.head.append(google_meta)
- if (bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None):
- bing_meta = soup.new_tag('meta')
- bing_meta.attrs['name'] = 'msvalidate.01'
- bing_meta.attrs['content'] = bing_content
- soup.head.append(bing_meta)
-
- html = soup.prettify("utf-8")
- with open(main_page_path, "wb") as file:
- file.write(html)
-
-
-def compare_version_info(timeout):
- while True:
- repo = git.Repo(os.getcwd())
- try:
- rem = repo.remote()
- res = rem.fetch()
- diff_list = res[0].commit.diff(repo.heads.master)
- except TimeoutError:
- print('timeout fetching the repository version')
- else:
- if diff_list:
- print('you are running an outdated version, SNARE will be updated and restarted')
- repo.git.reset('--hard')
- repo.heads.master.checkout()
- repo.git.clean('-xdf')
- repo.remotes.origin.pull()
- pip.main(['install', '-r', 'requirements.txt'])
- os.execv(sys.executable, [sys.executable, __file__] + sys.argv[1:])
- return
- else:
- print('you are running the latest version')
- time.sleep(timeout)
-
-
-def parse_timeout(timeout):
- result = None
- timeouts_coeff = {
- 'M': 60,
- 'H': 3600,
- 'D': 86400
- }
-
- form = timeout[-1]
- if form not in timeouts_coeff.keys():
- print('Bad timeout format, default will be used')
- parse_timeout('24H')
- else:
- result = int(timeout[:-1])
- result *= timeouts_coeff[form]
- return result
-
-
-async def check_tanner():
- vm = VersionManager()
- with aiohttp.ClientSession() as client:
- req_url = 'http://{}:8090/version'.format(args.tanner)
- try:
- resp = await client.get(req_url)
- result = await resp.json()
- version = result["version"]
- vm.check_compatibility(version)
- except aiohttp.errors.ClientOSError:
- print("Can't connect to tanner host {}".format(req_url))
- exit(1)
- else:
- await resp.release()
-
-
-if __name__ == '__main__':
- print(r"""
- _____ _ _____ ____ ______
- / ___// | / / | / __ \/ ____/
- \__ \/ |/ / /| | / /_/ / __/
- ___/ / /| / ___ |/ _, _/ /___
-/____/_/ |_/_/ |_/_/ |_/_____/
-
- """)
- snare_uuid = snare_setup()
- parser = argparse.ArgumentParser()
- page_group = parser.add_mutually_exclusive_group(required=True)
- page_group.add_argument("--page-dir", help="name of the folder to be served")
- page_group.add_argument("--list-pages", help="list available pages", action='store_true')
- parser.add_argument("--index-page", help="file name of the index page", default='index.html')
- parser.add_argument("--port", help="port to listen on", default='8080')
- parser.add_argument("--interface", help="interface to bind to")
- parser.add_argument("--host-ip", help="host ip to bind to", default='localhost')
- parser.add_argument("--debug", help="run web server in debug mode", default=False)
- parser.add_argument("--tanner", help="ip of the tanner service", default='tanner.mushmush.org')
- parser.add_argument("--skip-check-version", help="skip check for update", action='store_true')
- parser.add_argument("--slurp-enabled", help="enable nsq logging", action='store_true')
- parser.add_argument("--slurp-host", help="nsq logging host", default='slurp.mushmush.org')
- parser.add_argument("--slurp-auth", help="nsq logging auth", default='slurp')
- parser.add_argument("--config", help="snare config file", default='snare.cfg')
- parser.add_argument("--auto-update", help="auto update SNARE if new version available ", default=True)
- parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H')
- parser.add_argument("--server-header", help="set server-header", default='nignx/1.3.8')
- parser.add_argument("--no-dorks", help="disable the use of dorks", action='store_true')
- parser.add_argument("--log-dir", help="path to directory of the log file", default='/opt/snare/')
-
- args = parser.parse_args()
- base_path = '/opt/snare/'
- base_page_path = '/opt/snare/pages/'
- config = configparser.ConfigParser()
- config.read(os.path.join(base_path, args.config))
-
- log_debug = args.log_dir + "snare.log"
- log_err = args.log_dir + "snare.err"
- logger.Logger.create_logger(log_debug, log_err, __package__)
-
- if args.list_pages:
- print('Available pages:\n')
- for page in os.listdir(base_page_path):
- print('\t- {}'.format(page))
- print('\nuse with --page-dir {page_name}\n\n')
- exit()
- full_page_path = os.path.join(base_page_path, args.page_dir)
- if not os.path.exists(full_page_path):
- print("--page-dir: {0} does not exist".format(args.page_dir))
- exit()
- args.index_page = os.path.join("/", args.index_page)
-
- if not os.path.exists(os.path.join(full_page_path, 'meta.json')):
- conv = Converter()
- conv.convert(full_page_path)
- print("pages was converted. Try to clone again for the better result.")
-
- with open(os.path.join(full_page_path, 'meta.json')) as meta:
- meta_info = json.load(meta)
- if not os.path.exists(os.path.join(base_page_path, args.page_dir,
- os.path.join(meta_info[args.index_page]['hash']))):
- print('can\'t create meta tag')
- else:
- add_meta_tag(args.page_dir, meta_info[args.index_page]['hash'])
- loop = asyncio.get_event_loop()
- loop.run_until_complete(check_tanner())
-
- pool = ProcessPoolExecutor(max_workers=multiprocessing.cpu_count())
- compare_version_fut = None
- if args.auto_update is True:
- timeout = parse_timeout(args.update_timeout)
- compare_version_fut = loop.run_in_executor(pool, compare_version_info, timeout)
-
- if args.host_ip == 'localhost' and args.interface:
- host_ip = ni.ifaddresses(args.interface)[2][0]['addr']
- else:
- host_ip = args.host_ip
- future = loop.create_server(
- lambda: HttpRequestHandler(meta_info, args, debug=args.debug, keep_alive=75),
- args.host_ip, int(args.port))
- srv = loop.run_until_complete(future)
-
- drop_privileges()
- print('serving on {0} with uuid {1}'.format(srv.sockets[0].getsockname()[:2], snare_uuid.decode('utf-8')))
- print("Debug logs will be stored in", log_debug)
- print("Error logs will be stored in", log_err)
- print("(Press CTRL+C to quit)")
- try:
- loop.run_forever()
- except (KeyboardInterrupt, TypeError) as e:
- print(e)
- finally:
- if compare_version_fut:
- compare_version_fut.cancel()
- srv.close()
- loop.run_until_complete(srv.wait_closed())
- loop.close()
diff --git a/snare/__init__.py b/snare/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/clone.py b/snare/cloner.py
similarity index 66%
rename from clone.py
rename to snare/cloner.py
index c7b1e0fc..b1632c0b 100644
--- a/clone.py
+++ b/snare/cloner.py
@@ -1,40 +1,21 @@
-#!/usr/bin/env python3
-
-"""
-Copyright (C) 2015-2016 MushMush Foundation
-
-This program is free software: you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
-
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-"""
-
-import argparse
+import os
+import sys
+import logging
import asyncio
+from asyncio import Queue
import hashlib
import json
-import os
import re
-import sys
-from asyncio import Queue
-
import aiohttp
import cssutils
import yarl
from bs4 import BeautifulSoup
-import logger
-import logging
class Cloner(object):
def __init__(self, root, max_depth, css_validate):
self.visited_urls = []
- self.root, self.error_page = self.add_scheme(root)
+ self.root, self.error_page = self.add_scheme(root)
self.max_depth = max_depth
self.moved_root = None
if len(self.root.host) < 4:
@@ -42,7 +23,7 @@ def __init__(self, root, max_depth, css_validate):
self.target_path = '/opt/snare/pages/{}'.format(self.root.host)
if not os.path.exists(self.target_path):
- os.mkdir(self.target_path)
+ os.mkdir(self.target_path)
self.css_validate = css_validate
self.new_urls = Queue()
self.meta = {}
@@ -148,11 +129,10 @@ async def get_body(self, session):
data = None
content_type = None
try:
- with aiohttp.Timeout(10.0):
- response = await session.get(current_url, headers={'Accept': 'text/html'})
- content_type = response.content_type
- data = await response.read()
-
+ response = await session.get(current_url, headers={'Accept': 'text/html'}, timeout=10.0)
+ content_type = response.content_type
+ data = await response.read()
+
except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
self.logger.error(client_error)
else:
@@ -165,8 +145,8 @@ async def get_body(self, session):
data = str(soup).encode()
with open(os.path.join(self.target_path, hash_name), 'wb') as index_fh:
index_fh.write(data)
- if content_type == 'text/css':
- css = cssutils.parseString(data, validate=self.css_validate)
+ if content_type == 'text/css':
+ css = cssutils.parseString(data, validate=self.css_validate)
for carved_url in cssutils.getUrls(css):
if carved_url.startswith('data'):
continue
@@ -178,20 +158,20 @@ async def get_body(self, session):
async def get_root_host(self):
try:
- with aiohttp.ClientSession() as session:
+ async with aiohttp.ClientSession() as session:
resp = await session.get(self.root)
- if resp._url_obj.host != self.root.host:
+ if resp.host != self.root.host:
self.moved_root = resp._url_obj
resp.close()
- except aiohttp.errors.ClientError as err:
- self.logger.error("Can\'t connect to target host.")
+ except aiohttp.ClientError as err:
+ self.logger.error("Can\'t connect to target host: %s", err)
exit(-1)
async def run(self):
session = aiohttp.ClientSession()
try:
await self.new_urls.put((self.root, 0))
- await self.new_urls.put((self.error_page,0))
+ await self.new_urls.put((self.error_page, 0))
await self.get_body(session)
except KeyboardInterrupt:
raise
@@ -199,51 +179,3 @@ async def run(self):
with open(os.path.join(self.target_path, 'meta.json'), 'w') as mj:
json.dump(self.meta, mj)
await session.close()
-
-def str_to_bool(v):
- if v.lower() == 'true':
- return True
- elif v.lower() == 'false':
- return False
- else:
- raise argparse.ArgumentTypeError('Boolean value expected')
-
-def main():
- if os.getuid() != 0:
- print('Clone has to be run as root!')
- sys.exit(1)
- if not os.path.exists('/opt/snare'):
- os.mkdir('/opt/snare')
- if not os.path.exists('/opt/snare/pages'):
- os.mkdir('/opt/snare/pages')
- loop = asyncio.get_event_loop()
- parser = argparse.ArgumentParser()
- parser.add_argument("--target", help="domain of the site to be cloned", required=True)
- parser.add_argument("--max-depth", help="max depth of the cloning", required=False, default=sys.maxsize)
- parser.add_argument("--log_path", help="path to the error log file")
- parser.add_argument("--css-validate", help="set whether css validation is required", type=str_to_bool, default=None)
- args = parser.parse_args()
- if args.log_path:
- log_err = args.log_path + "clone.err"
- else:
- log_err = "/opt/snare/clone.err"
- logger.Logger.create_clone_logger(log_err, __package__)
- print("Error logs will be stored in {}\n".format(log_err))
- try:
- cloner = Cloner(args.target, int(args.max_depth), args.css_validate)
- loop.run_until_complete(cloner.get_root_host())
- loop.run_until_complete(cloner.run())
- except KeyboardInterrupt:
- pass
-
-
-if __name__ == '__main__':
- print("""
- ______ __ ______ _ ____________
- / ____// / / __ // | / / ____/ __ \\
- / / / / / / / // |/ / __/ / /_/ /
- / /___ / /____ / /_/ // /| / /___/ _, _/
-/_____//______//_____//_/ |_/_____/_/ |_|
-
- """)
- main()
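The hunk above drops the pre-2.0 `with aiohttp.Timeout(10.0)` context manager in favour of the per-request `timeout` argument. A minimal standalone sketch of the new pattern, with a hypothetical URL:

    import asyncio
    import aiohttp

    async def fetch(url='http://example.com/'):  # hypothetical target
        async with aiohttp.ClientSession() as session:
            try:
                response = await session.get(url, headers={'Accept': 'text/html'}, timeout=10.0)
                data = await response.read()
                return response.content_type, data
            except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
                print(client_error)
                return None, None

    content_type, data = asyncio.get_event_loop().run_until_complete(fetch())
    print(content_type)  # e.g. text/html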
diff --git a/snare/html_handler.py b/snare/html_handler.py
new file mode 100644
index 00000000..73f2c443
--- /dev/null
+++ b/snare/html_handler.py
@@ -0,0 +1,63 @@
+import asyncio
+import json
+import logging
+from bs4 import BeautifulSoup
+import cssutils
+import aiohttp
+
+
+class HtmlHandler():
+ def __init__(self, no_dorks, tanner):
+ self.no_dorks = no_dorks
+ self.dorks = []
+ self.logger = logging.getLogger(__name__)
+ self.tanner = tanner
+
+ async def get_dorks(self):
+ dorks = None
+ try:
+ async with aiohttp.ClientSession() as session:
+ r = await session.get(
+ 'http://{0}:8090/dorks'.format(self.tanner), timeout=10.0
+ )
+ try:
+ dorks = await r.json()
+ except json.decoder.JSONDecodeError as e:
+ self.logger.error('Error getting dorks: %s', e)
+ finally:
+ await r.release()
+ except asyncio.TimeoutError:
+ self.logger.info('Dorks timeout')
+ return dorks['response']['dorks'] if dorks else []
+
+ async def handle_content(self, content):
+ soup = BeautifulSoup(content, 'html.parser')
+ if self.no_dorks is not True:
+ for p_elem in soup.find_all('p'):
+ if p_elem.findChildren():
+ continue
+ css = None
+ if 'style' in p_elem.attrs:
+ css = cssutils.parseStyle(p_elem.attrs['style'])
+ text_list = p_elem.text.split()
+ p_new = soup.new_tag('p', style=css.cssText if css else None)
+ for idx, word in enumerate(text_list):
+ # Fetch dorks if required
+ if len(self.dorks) <= 0:
+ self.dorks = await self.get_dorks()
+ word += ' '
+ if idx % 5 == 0:
+ a_tag = soup.new_tag(
+ 'a',
+ href=self.dorks.pop(),
+ style='color:{color};text-decoration:none;cursor:text;'.format(
+ color=css.color if css and 'color' in css.keys() else '#000000'
+ )
+ )
+ a_tag.string = word
+ p_new.append(a_tag)
+ else:
+ p_new.append(soup.new_string(word))
+ p_elem.replace_with(p_new)
+ content = soup.encode('utf-8')
+ return content
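handle_content() is easiest to follow on a toy document: every fifth word of a childless <p> becomes a link to a dork, styled so it renders like plain text. A standalone sketch of that rewriting; the dork URLs here are stand-ins for what get_dorks() fetches from TANNER:

    from bs4 import BeautifulSoup

    html = '<html><body><p>one two three four five six</p></body></html>'
    soup = BeautifulSoup(html, 'html.parser')
    dorks = ['/hypothetical-dork?id=1', '/hypothetical-dork?id=2']  # stand-ins

    for p_elem in soup.find_all('p'):
        if p_elem.findChildren():
            continue
        p_new = soup.new_tag('p')
        for idx, word in enumerate(p_elem.text.split()):
            word += ' '
            if idx % 5 == 0 and dorks:
                a_tag = soup.new_tag('a', href=dorks.pop(),
                                     style='color:#000000;text-decoration:none;cursor:text;')
                a_tag.string = word
                p_new.append(a_tag)
            else:
                p_new.append(soup.new_string(word))
        p_elem.replace_with(p_new)

    print(soup)  # 'one' and 'six' are now links that look like ordinary text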
diff --git a/snare/middlewares.py b/snare/middlewares.py
new file mode 100644
index 00000000..a8875e3c
--- /dev/null
+++ b/snare/middlewares.py
@@ -0,0 +1,38 @@
+import aiohttp_jinja2
+from aiohttp import web
+
+
+class SnareMiddleware():
+
+ def __init__(self, file_name):
+ self.error_404 = file_name
+
+ async def handle_404(self, request):
+ return aiohttp_jinja2.render_template(self.error_404, request, {})
+
+ async def handle_500(self, request):
+ return aiohttp_jinja2.render_template('500.html', request, {})
+
+ def create_error_middleware(self, overrides):
+
+ @web.middleware
+ async def error_middleware(request, handler):
+ try:
+ response = await handler(request)
+ override = overrides.get(response.status)
+ if override:
+ return await override(request)
+ return response
+ except web.HTTPException as ex:
+ override = overrides.get(ex.status)
+ if override:
+ return await override(request)
+ raise
+ return error_middleware
+
+ def setup_middlewares(self, app):
+ error_middleware = self.create_error_middleware({
+ 404: self.handle_404,
+ 500: self.handle_500
+ })
+ app.middlewares.append(error_middleware)
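This replaces the old handle_error() override with aiohttp's middleware mechanism: any handler response or HTTPException whose status has an override gets re-rendered. A minimal runnable sketch of the same pattern, with a hypothetical plain-text handler in place of the jinja2 templates:

    from aiohttp import web

    async def handle_404(request):
        return web.Response(text='custom 404 page', status=404)

    def create_error_middleware(overrides):
        @web.middleware
        async def error_middleware(request, handler):
            try:
                response = await handler(request)
                override = overrides.get(response.status)
                if override:
                    return await override(request)
                return response
            except web.HTTPException as ex:
                override = overrides.get(ex.status)
                if override:
                    return await override(request)
                raise
        return error_middleware

    app = web.Application(middlewares=[create_error_middleware({404: handle_404})])
    # web.run_app(app)  # any unmatched path now renders the custom 404 body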
diff --git a/snare/server.py b/snare/server.py
new file mode 100644
index 00000000..d9cf1cab
--- /dev/null
+++ b/snare/server.py
@@ -0,0 +1,93 @@
+import logging
+import json
+import aiohttp
+from aiohttp import web
+from aiohttp.web import StaticResource as StaticRoute
+import multidict
+import aiohttp_jinja2
+import jinja2
+from snare.middlewares import SnareMiddleware
+from snare.tanner_handler import TannerHandler
+
+
+class HttpRequestHandler():
+ def __init__(self, meta, run_args, snare_uuid, debug=False, keep_alive=75, **kwargs):
+ self.run_args = run_args
+ self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
+ self.meta = meta
+ self.snare_uuid = snare_uuid
+ self.logger = logging.getLogger(__name__)
+ self.sroute = StaticRoute(
+ name=None, prefix='/',
+ directory=self.dir
+ )
+ self.tanner_handler = TannerHandler(run_args, meta, snare_uuid)
+
+ async def submit_slurp(self, data):
+ try:
+ async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
+ r = await session.post(
+ 'https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}'.format(
+ self.run_args.slurp_host, self.run_args.slurp_auth, data
+ ), data=json.dumps(data), timeout=10.0
+ )
+ assert r.status == 200
+ r.close()
+ except Exception as e:
+ self.logger.error('Error submitting slurp: %s', e)
+
+ async def handle_request(self, request):
+ self.logger.info('Request path: {0}'.format(request.path))
+ data = self.tanner_handler.create_data(request, 200)
+ if request.method == 'POST':
+ post_data = await request.post()
+ self.logger.info('POST data:')
+ for key, val in post_data.items():
+ self.logger.info('\t- {0}: {1}'.format(key, val))
+ data['post_data'] = dict(post_data)
+
+ # Submit the event to the TANNER service
+ event_result = await self.tanner_handler.submit_data(data)
+
+ # Log the event to slurp service if enabled
+ if self.run_args.slurp_enabled:
+ await self.submit_slurp(request.path)
+
+ content, content_type, headers, status_code = await self.tanner_handler.parse_tanner_response(
+ request.path, event_result['response']['message']['detection'])
+
+ response_headers = multidict.CIMultiDict()
+
+ for name, val in headers.items():
+ response_headers.add(name, val)
+
+ response_headers.add('Server', self.run_args.server_header)
+
+ if 'cookies' in data and 'sess_uuid' in data['cookies']:
+ previous_sess_uuid = data['cookies']['sess_uuid']
+ else:
+ previous_sess_uuid = None
+
+ if event_result is not None and ('sess_uuid' in event_result['response']['message']):
+ cur_sess_id = event_result['response']['message']['sess_uuid']
+ if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id:
+ response_headers.add('Set-Cookie', 'sess_uuid=' + cur_sess_id)
+
+ if not content_type:
+ response_content_type = 'text/plain'
+ else:
+ response_content_type = content_type
+ response = web.Response(
+ body=content, status=status_code, headers=response_headers, content_type=response_content_type
+ )
+ return response
+
+ def start(self):
+ app = web.Application()
+ app.add_routes([web.route('*', '/{tail:.*}', self.handle_request)])
+ aiohttp_jinja2.setup(
+ app, loader=jinja2.FileSystemLoader(self.dir)
+ )
+ middleware = SnareMiddleware(self.meta['/status_404']['hash'])
+ middleware.setup_middlewares(app)
+ web.run_app(app, host=self.run_args.host_ip, port=self.run_args.port)
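start() registers a single wildcard route, so every method on every path funnels into handle_request and from there to TANNER. Stripped of the honeypot logic, the routing reduces to this sketch:

    from aiohttp import web

    async def handle_request(request):
        # The real handler builds the TANNER round trip; here we just echo.
        return web.Response(text='{} {}'.format(request.method, request.path))

    app = web.Application()
    app.add_routes([web.route('*', '/{tail:.*}', handle_request)])
    # web.run_app(app, host='localhost', port=8080)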
diff --git a/snare/tanner_handler.py b/snare/tanner_handler.py
new file mode 100644
index 00000000..a6f8298f
--- /dev/null
+++ b/snare/tanner_handler.py
@@ -0,0 +1,126 @@
+import re
+import os
+from urllib.parse import unquote
+import mimetypes
+import json
+import logging
+import aiohttp
+from bs4 import BeautifulSoup
+from snare.html_handler import HtmlHandler
+
+
+class TannerHandler():
+ def __init__(self, run_args, meta, snare_uuid):
+ self.run_args = run_args
+ self.meta = meta
+ self.dir = '/opt/snare/pages/{}'.format(run_args.page_dir)
+ self.snare_uuid = snare_uuid
+ self.html_handler = HtmlHandler(run_args.no_dorks, run_args.tanner)
+ self.logger = logging.getLogger(__name__)
+
+ def create_data(self, request, response_status):
+ data = dict(
+ method=None,
+ path=None,
+ headers=None,
+ uuid=self.snare_uuid.decode('utf-8'),
+ peer=None,
+ status=response_status
+ )
+ if request.transport:
+ peer = dict(
+ ip=request.transport.get_extra_info('peername')[0],
+ port=request.transport.get_extra_info('peername')[1]
+ )
+ data['peer'] = peer
+ if request.path:
+ header = {key: value for (key, value) in request.headers.items()}
+ data['method'] = request.method
+ data['headers'] = header
+ data['path'] = request.path
+ if ('Cookie' in header):
+ data['cookies'] = {
+ cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')
+ }
+ return data
+
+ async def submit_data(self, data):
+ event_result = None
+ try:
+ async with aiohttp.ClientSession() as session:
+ r = await session.post(
+ 'http://{0}:8090/event'.format(self.run_args.tanner), data=json.dumps(data),
+ timeout=10.0
+ )
+ try:
+ event_result = await r.json()
+ except json.decoder.JSONDecodeError as e:
+ self.logger.error('Error submitting data: {} {}'.format(e, data))
+ finally:
+ await r.release()
+ except Exception as e:
+ raise e
+ return event_result
+
+ async def parse_tanner_response(self, requested_name, detection):
+ content_type = None
+ content = None
+ status_code = 200
+ headers = {}
+ # Creating a regex object for the pattern of multiple contiguous forward slashes
+ p = re.compile('/+')
+ # Substituting all occurrences of the pattern with single forward slash
+ requested_name = p.sub('/', requested_name)
+
+ if detection['type'] == 1:
+ query_start = requested_name.find('?')
+ if query_start != -1:
+ requested_name = requested_name[:query_start]
+ if requested_name == '/':
+ requested_name = self.run_args.index_page
+ try:
+ if requested_name[-1] == '/':
+ requested_name = requested_name[:-1]
+ requested_name = unquote(requested_name)
+ file_name = self.meta[requested_name]['hash']
+ content_type = self.meta[requested_name]['content_type']
+ except KeyError:
+ status_code = 404
+ else:
+ path = os.path.join(self.dir, file_name)
+ if os.path.isfile(path):
+ with open(path, 'rb') as fh:
+ content = fh.read()
+ if content_type:
+ if 'text/html' in content_type:
+ content = await self.html_handler.handle_content(content)
+
+ elif detection['type'] == 2:
+ payload_content = detection['payload']
+ if payload_content['page']:
+ try:
+ file_name = self.meta[payload_content['page']]['hash']
+ content_type = self.meta[payload_content['page']]['content_type']
+ page_path = os.path.join(self.dir, file_name)
+ with open(page_path, encoding='utf-8') as p:
+ content = p.read()
+ except KeyError:
+ content = ''
+ content_type = r'text\html'
+
+ soup = BeautifulSoup(content, 'html.parser')
+ script_tag = soup.new_tag('div')
+ script_tag.append(BeautifulSoup(payload_content['value'], 'html.parser'))
+ soup.body.append(script_tag)
+ content = str(soup).encode()
+ else:
+ content_type = mimetypes.guess_type(payload_content['value'])[0]
+ content = payload_content['value'].encode('utf-8')
+
+ if 'headers' in payload_content:
+ headers = payload_content['headers']
+ else:
+ payload_content = detection['payload']
+ status_code = payload_content['status_code']
+
+ return (content, content_type, headers, status_code)
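Before the meta.json lookup, parse_tanner_response() collapses runs of slashes in the requested path. A quick standalone check of that normalisation:

    import re

    p = re.compile('/+')
    print(p.sub('/', '//index//about///'))  # /index/about/
    # The trailing slash is then stripped separately before the self.meta lookup.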
diff --git a/snare/tests/__init__.py b/snare/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/test_add_meta_tag.py b/snare/tests/test_add_meta_tag.py
similarity index 100%
rename from tests/test_add_meta_tag.py
rename to snare/tests/test_add_meta_tag.py
diff --git a/tests/test_converter.py b/snare/tests/test_converter.py
similarity index 100%
rename from tests/test_converter.py
rename to snare/tests/test_converter.py
diff --git a/tests/test_create_data.py b/snare/tests/test_create_data.py
similarity index 100%
rename from tests/test_create_data.py
rename to snare/tests/test_create_data.py
diff --git a/tests/test_get_dorks.py b/snare/tests/test_get_dorks.py
similarity index 100%
rename from tests/test_get_dorks.py
rename to snare/tests/test_get_dorks.py
diff --git a/tests/test_handle_error.py b/snare/tests/test_handle_error.py
similarity index 100%
rename from tests/test_handle_error.py
rename to snare/tests/test_handle_error.py
diff --git a/tests/test_handle_html_content.py b/snare/tests/test_handle_html_content.py
similarity index 100%
rename from tests/test_handle_html_content.py
rename to snare/tests/test_handle_html_content.py
diff --git a/tests/test_handle_request.py b/snare/tests/test_handle_request.py
similarity index 100%
rename from tests/test_handle_request.py
rename to snare/tests/test_handle_request.py
diff --git a/tests/test_parse_tanner_response.py b/snare/tests/test_parse_tanner_response.py
similarity index 100%
rename from tests/test_parse_tanner_response.py
rename to snare/tests/test_parse_tanner_response.py
diff --git a/tests/test_submit_data.py b/snare/tests/test_submit_data.py
similarity index 100%
rename from tests/test_submit_data.py
rename to snare/tests/test_submit_data.py
diff --git a/tests/test_versions_manager.py b/snare/tests/test_versions_manager.py
similarity index 100%
rename from tests/test_versions_manager.py
rename to snare/tests/test_versions_manager.py
diff --git a/snare/utils/__init__.py b/snare/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/asyncmock.py b/snare/utils/asyncmock.py
similarity index 100%
rename from utils/asyncmock.py
rename to snare/utils/asyncmock.py
diff --git a/logger.py b/snare/utils/logger.py
similarity index 81%
rename from logger.py
rename to snare/utils/logger.py
index e0b146e1..3778f988 100644
--- a/logger.py
+++ b/snare/utils/logger.py
@@ -1,6 +1,7 @@
import logging
import logging.handlers
+
class LevelFilter(logging.Filter):
"""Filters (lets through) all messages with level < LEVEL"""
@@ -8,24 +9,27 @@ def __init__(self, level):
self.level = level
def filter(self, record):
- return record.levelno < self.level # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+        # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
+        return record.levelno < self.level
+
class Logger:
-
+
@staticmethod
def create_logger(debug_filename, err_filename, logger_name):
logger = logging.getLogger(logger_name)
logger.setLevel(logging.DEBUG)
logger.propagate = False
formatter = logging.Formatter(
- fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
-
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S'
+ )
+
# ERROR log to 'snare.err'
error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(formatter)
logger.addHandler(error_log_handler)
-
+
# DEBUG log to 'snare.log'
debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding='utf-8')
debug_log_handler.setLevel(logging.DEBUG)
@@ -33,17 +37,17 @@ def create_logger(debug_filename, err_filename, logger_name):
max_level_filter = LevelFilter(logging.ERROR)
debug_log_handler.addFilter(max_level_filter)
logger.addHandler(debug_log_handler)
-
+
return logger
-
+
@staticmethod
def create_clone_logger(err_filename, logger_name):
logger = logging.getLogger(logger_name)
formatter = logging.Formatter(
- fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ fmt='%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S'
+ )
# ERROR log to 'clone.err'
error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding='utf-8')
error_log_handler.setLevel(logging.ERROR)
error_log_handler.setFormatter(formatter)
logger.addHandler(error_log_handler)
-
\ No newline at end of file
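The reformatted logger keeps its two-file split: everything below ERROR goes to snare.log, ERROR and above to snare.err. A console-based sketch of the same wiring, with StreamHandlers standing in for the rotating file handlers:

    import logging

    class LevelFilter(logging.Filter):
        """Same filter as above: lets through records strictly below LEVEL."""
        def __init__(self, level):
            super().__init__()
            self.level = level

        def filter(self, record):
            return record.levelno < self.level

    logger = logging.getLogger('demo')
    logger.setLevel(logging.DEBUG)

    debug_handler = logging.StreamHandler()        # stands in for snare.log
    debug_handler.setLevel(logging.DEBUG)
    debug_handler.addFilter(LevelFilter(logging.ERROR))
    logger.addHandler(debug_handler)

    error_handler = logging.StreamHandler()        # stands in for snare.err
    error_handler.setLevel(logging.ERROR)
    logger.addHandler(error_handler)

    logger.debug('reaches only the debug handler')
    logger.error('reaches only the error handler')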
diff --git a/utils/page_path_generator.py b/snare/utils/page_path_generator.py
similarity index 100%
rename from utils/page_path_generator.py
rename to snare/utils/page_path_generator.py
diff --git a/snare/utils/snare_helpers.py b/snare/utils/snare_helpers.py
new file mode 100644
index 00000000..ffebd940
--- /dev/null
+++ b/snare/utils/snare_helpers.py
@@ -0,0 +1,105 @@
+import os
+from os import walk
+import hashlib
+import mimetypes
+import json
+import shutil
+import argparse
+from distutils.version import StrictVersion
+from bs4 import BeautifulSoup
+
+
+class VersionManager:
+ def __init__(self):
+ self.version = "0.2.0"
+ self.version_mapper = {
+ "0.1.0": ["0.1.0", "0.4.0"],
+ "0.2.0": ["0.5.0", "0.5.0"]
+ }
+
+ def check_compatibility(self, tanner_version):
+ min_version = self.version_mapper[self.version][0]
+ max_version = self.version_mapper[self.version][1]
+ if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)):
+ raise RuntimeError("Wrong tanner version: {}. Compatible versions are {} - {}"
+ .format(tanner_version, min_version, max_version))
+
+
+class Converter:
+ def __init__(self):
+ self.meta = {}
+
+ def convert(self, path):
+ files_to_convert = []
+
+ for (dirpath, dirnames, filenames) in walk(path):
+ for fn in filenames:
+ files_to_convert.append(os.path.join(dirpath, fn))
+
+ for fn in files_to_convert:
+ path_len = len(path)
+ file_name = fn[path_len:]
+ m = hashlib.md5()
+ m.update(fn.encode('utf-8'))
+ hash_name = m.hexdigest()
+ self.meta[file_name] = {'hash': hash_name, 'content_type': mimetypes.guess_type(file_name)[0]}
+ shutil.copyfile(fn, os.path.join(path, hash_name))
+ os.remove(fn)
+
+ with open(os.path.join(path, 'meta.json'), 'w') as mj:
+ json.dump(self.meta, mj)
+
+
+def add_meta_tag(page_dir, index_page, config):
+ google_content = config['WEB-TOOLS']['google']
+ bing_content = config['WEB-TOOLS']['bing']
+
+ if not google_content and not bing_content:
+ return
+
+ main_page_path = os.path.join('/opt/snare/pages/', page_dir, index_page)
+ with open(main_page_path) as main:
+ main_page = main.read()
+ soup = BeautifulSoup(main_page, 'html.parser')
+
+ if (google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None):
+ google_meta = soup.new_tag('meta')
+ google_meta.attrs['name'] = 'google-site-verification'
+ google_meta.attrs['content'] = google_content
+ soup.head.append(google_meta)
+ if (bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None):
+ bing_meta = soup.new_tag('meta')
+ bing_meta.attrs['name'] = 'msvalidate.01'
+ bing_meta.attrs['content'] = bing_content
+ soup.head.append(bing_meta)
+
+ html = soup.prettify("utf-8")
+ with open(main_page_path, "wb") as file:
+ file.write(html)
+
+
+def parse_timeout(timeout):
+ result = None
+ timeouts_coeff = {
+ 'M': 60,
+ 'H': 3600,
+ 'D': 86400
+ }
+
+ form = timeout[-1]
+ if form not in timeouts_coeff.keys():
+ print('Bad timeout format, default will be used')
+        result = parse_timeout('24H')
+ else:
+ result = int(timeout[:-1])
+ result *= timeouts_coeff[form]
+ return result
+
+
+def str_to_bool(v):
+ if v.lower() == 'true':
+ return True
+ elif v.lower() == 'false':
+ return False
+ else:
+ raise argparse.ArgumentTypeError('Boolean value expected')
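parse_timeout() converts the --update-timeout suffix (M, H, D) into seconds; with the fix above, a bad suffix now falls back to the 24H default instead of returning None. Expected values, assuming the package from this patch is importable:

    from snare.utils.snare_helpers import parse_timeout

    print(parse_timeout('30M'))  # 1800
    print(parse_timeout('24H'))  # 86400
    print(parse_timeout('1D'))   # 86400
    print(parse_timeout('5X'))   # prints the warning, then returns 86400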
diff --git a/versions_manager.py b/versions_manager.py
deleted file mode 100644
index fba8b410..00000000
--- a/versions_manager.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from distutils.version import StrictVersion
-
-
-class VersionManager:
- def __init__(self):
- self.version = "0.2.0"
- self.version_mapper = {
- "0.1.0": ["0.1.0", "0.4.0"],
- "0.2.0": ["0.5.0", "0.5.0"]
- }
-
- def check_compatibility(self, tanner_version):
- min_version = self.version_mapper[self.version][0]
- max_version = self.version_mapper[self.version][1]
- if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)):
- raise RuntimeError("Wrong tanner version: {}. Compatible versions are {} - {}"
- .format(tanner_version, min_version, max_version))
From a9c35beb98d1be29d5727e3bd17afe09b1002ba7 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Tue, 5 Jun 2018 00:22:13 +0530
Subject: [PATCH 11/13] Rewrite tests (#142)
* Rewriting tests after latest snare update
* Update requirements.txt
* Update .travis.yml
* error fixes
* Minor Change
---
.travis.yml | 2 +-
requirements.txt | 5 +-
snare/tests/test_add_meta_tag.py | 12 ++--
snare/tests/test_converter.py | 3 +-
snare/tests/test_create_data.py | 52 ++++++++------
snare/tests/test_get_dorks.py | 34 +++++----
snare/tests/test_handle_error.py | 70 ------------------
snare/tests/test_handle_html_content.py | 41 +++++------
snare/tests/test_handle_request.py | 87 +++++++++++------------
snare/tests/test_parse_tanner_response.py | 41 ++++++-----
snare/tests/test_submit_data.py | 30 ++++----
snare/tests/test_versions_manager.py | 2 +-
12 files changed, 156 insertions(+), 223 deletions(-)
delete mode 100644 snare/tests/test_handle_error.py
diff --git a/.travis.yml b/.travis.yml
index faf78826..09f04e02 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,5 +5,5 @@ python:
install: "pip install -r requirements.txt"
# command to run tests
script:
- - nosetests -w ./tests -vv
+ - nosetests -w snare/tests -vv
- pycodestyle . --max-line-length=120
diff --git a/requirements.txt b/requirements.txt
index 33f64ad3..6720e057 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,9 @@
-aiohttp<2.0
+aiohttp
+aiohttp_jinja2
+yarl
beautifulsoup4
cssutils
gitpython
netifaces
-yarl==0.9.8
python-magic
pycodestyle
diff --git a/snare/tests/test_add_meta_tag.py b/snare/tests/test_add_meta_tag.py
index fe87facd..df28b88a 100644
--- a/snare/tests/test_add_meta_tag.py
+++ b/snare/tests/test_add_meta_tag.py
@@ -1,10 +1,10 @@
import unittest
import os
-from bs4 import BeautifulSoup
-import snare
import shutil
import configparser
-from utils.page_path_generator import generate_unique_path
+from bs4 import BeautifulSoup
+from snare.utils.snare_helpers import add_meta_tag
+from snare.utils.page_path_generator import generate_unique_path
class TestAddMetaTag(unittest.TestCase):
@@ -19,9 +19,9 @@ def setUp(self):
f.write(self.content)
def test_add_meta_tag(self):
- snare.config = configparser.ConfigParser()
- snare.config['WEB-TOOLS'] = dict(google='test google content', bing='test bing content')
- snare.add_meta_tag(self.page_dir, self.index_page)
+ config = configparser.ConfigParser()
+ config['WEB-TOOLS'] = dict(google='test google content', bing='test bing content')
+ add_meta_tag(self.page_dir, self.index_page, config)
with open(os.path.join(self.main_page_path, 'index.html')) as main:
main_page = main.read()
soup = BeautifulSoup(main_page, 'html.parser')
diff --git a/snare/tests/test_converter.py b/snare/tests/test_converter.py
index 21c051a1..20dfedc2 100644
--- a/snare/tests/test_converter.py
+++ b/snare/tests/test_converter.py
@@ -1,9 +1,8 @@
import unittest
import os
-import sys
import shutil
import json
-from converter import Converter
+from snare.utils.snare_helpers import Converter
class TestConverter(unittest.TestCase):
diff --git a/snare/tests/test_create_data.py b/snare/tests/test_create_data.py
index b27212e3..a07ab0f0 100644
--- a/snare/tests/test_create_data.py
+++ b/snare/tests/test_create_data.py
@@ -1,38 +1,49 @@
import unittest
from unittest.mock import Mock
-import asyncio
-import argparse
-import aiohttp
import shutil
import os
-import json
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-import snare
-from utils.page_path_generator import generate_unique_path
+import asyncio
+import argparse
+from yarl import URL
+from aiohttp import HttpVersion
+from aiohttp import web
+from aiohttp.http_parser import RawRequestMessage
+from snare.tanner_handler import TannerHandler
+from snare.utils.page_path_generator import generate_unique_path
-class TestHandleRequest(unittest.TestCase):
+class TestCreateData(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- snare.snare_uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.headers = {
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ args = run_args.parse_args(['--page-dir', page_dir])
+ snare_uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
+ args.no_dorks = True
+ self.handler = TannerHandler(args, meta, snare_uuid)
+ headers = {
'Host': 'test_host', 'status': 200,
'Cookie': 'sess_uuid=prev_test_uuid; test_cookie=test'
}
- self.request = aiohttp.protocol.RawRequestMessage(
- method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=self.headers,
- raw_headers=None, should_close=None, compression=None)
+ message = RawRequestMessage(
+ method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=headers,
+ raw_headers=None, should_close=None, compression=None, upgrade=None, chunked=None,
+ url=URL('http://test_url/')
+ )
+ loop = asyncio.get_event_loop()
+ RequestHandler = Mock()
+ protocol = RequestHandler()
+ self.request = web.Request(
+ message=message, payload=None, protocol=protocol, payload_writer=None,
+ task='POST', loop=loop
+ )
+ self.request.transport.get_extra_info = Mock(return_value=(['test_ip', 'test_port']))
self.response_status = "test_status"
+ self.data = None
self.expected_data = {
'method': 'POST', 'path': '/',
'headers': {'Host': 'test_host', 'status': 200,
@@ -42,9 +53,6 @@ def setUp(self):
'status': 'test_status',
'cookies': {'sess_uuid': 'prev_test_uuid', ' test_cookie': 'test'}
}
- asyncio.BaseTransport = Mock()
- self.handler.transport = asyncio.BaseTransport()
- self.handler.transport.get_extra_info = Mock(return_value=['test_ip', 'test_port'])
def test_create_data(self):
self.data = self.handler.create_data(self.request, self.response_status)
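The web.Request construction in this setUp recurs almost verbatim in test_handle_request.py below; a sketch of how it could be factored into a shared helper (the helper name is hypothetical, the keyword set is the one this patch uses):

    from unittest.mock import Mock
    from aiohttp import web, HttpVersion
    from aiohttp.http_parser import RawRequestMessage
    from yarl import URL

    def make_fake_request(method, path, headers, loop):
        # Build the parsed-message half of the request by hand ...
        message = RawRequestMessage(
            method=method, path=path, version=HttpVersion(major=1, minor=1),
            headers=headers, raw_headers=None, should_close=None,
            compression=None, upgrade=None, chunked=None,
            url=URL('http://test_url' + path))
        # ... and let a mocked protocol stand in for the transport layer.
        return web.Request(message=message, payload=None, protocol=Mock(),
                           payload_writer=None, task=method, loop=loop)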
diff --git a/snare/tests/test_get_dorks.py b/snare/tests/test_get_dorks.py
index ccddcc99..4fe98d72 100644
--- a/snare/tests/test_get_dorks.py
+++ b/snare/tests/test_get_dorks.py
@@ -1,33 +1,31 @@
import unittest
-from unittest.mock import Mock
import asyncio
-import argparse
-import aiohttp
import shutil
-import yarl
import os
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import yarl
+import aiohttp
+from snare.utils.asyncmock import AsyncMock
+from snare.html_handler import HtmlHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestGetDorks(unittest.TestCase):
def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
self.dorks = dict(response={'dorks': "test_dorks"})
self.loop = asyncio.new_event_loop()
aiohttp.ClientSession.get = AsyncMock(
- return_value=aiohttp.ClientResponse(url=yarl.URL("http://www.example.com"), method="GET")
- )
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.tanner = "tanner.mushmush.org"
+ return_value=aiohttp.ClientResponse(
+ url=yarl.URL("http://www.example.com"), method="GET", writer=None, continue100=1,
+ timer=None, request_info=None, traces=None, loop=self.loop,
+ session=None
+ )
+ )
+ no_dorks = True
+ tanner = "tanner.mushmush.org"
+ self.handler = HtmlHandler(no_dorks, tanner)
+ self.data = None
def test_get_dorks(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=dict(response={'dorks': "test_dorks"}))
@@ -35,7 +33,7 @@ def test_get_dorks(self):
async def test():
self.data = await self.handler.get_dorks()
self.loop.run_until_complete(test())
- aiohttp.ClientSession.get.assert_called_with('http://tanner.mushmush.org:8090/dorks')
+ aiohttp.ClientSession.get.assert_called_with('http://tanner.mushmush.org:8090/dorks', timeout=10.0)
def test_return_dorks(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=self.dorks)
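These tests predate unittest.mock.AsyncMock (added in Python 3.8), so they carry their own snare.utils.asyncmock.AsyncMock. That file is not part of this patch; a minimal sketch of the usual shim such helpers follow:

    from unittest.mock import MagicMock

    class AsyncMock(MagicMock):
        """A MagicMock whose calls can be awaited."""
        async def __call__(self, *args, **kwargs):
            return super(AsyncMock, self).__call__(*args, **kwargs)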
diff --git a/snare/tests/test_handle_error.py b/snare/tests/test_handle_error.py
deleted file mode 100644
index bedf8497..00000000
--- a/snare/tests/test_handle_error.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import unittest
-from unittest.mock import Mock
-import asyncio
-import argparse
-import aiohttp
-import shutil
-import os
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
-
-
-class TestHandleError(unittest.TestCase):
- def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
- self.main_page_path = generate_unique_path()
- os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- self.loop = asyncio.new_event_loop()
- self.status = 500
- self.message = "test"
- self.payload = "test"
- self.exc = "[Errno 0] test"
- self.headers = "test"
- self.reason = "test"
- self.data = dict(
- method='GET',
- path='/',
- headers="test_headers",
- uuid="test_uuid",
- peer="test_peer",
- status="test_status",
- error=self.exc
- )
- aiohttp.server.ServerHttpProtocol.handle_error = Mock()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.create_data = Mock(return_value=self.data)
- self.handler.submit_data = AsyncMock()
-
- def test_create_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- self.handler.create_data.assert_called_with(self.message, self.status)
-
- def test_submit_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- self.handler.submit_data.assert_called_with(self.data)
-
- def test_handle_error_data(self):
-
- async def test():
- await self.handler.handle_error(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
- self.loop.run_until_complete(test())
- aiohttp.server.ServerHttpProtocol.handle_error.assert_called_with(
- self.status, self.message, self.payload, self.exc, self.headers, self.reason)
-
- def tearDown(self):
- shutil.rmtree(self.main_page_path)
diff --git a/snare/tests/test_handle_html_content.py b/snare/tests/test_handle_html_content.py
index 2f3c03df..984a6b22 100644
--- a/snare/tests/test_handle_html_content.py
+++ b/snare/tests/test_handle_html_content.py
@@ -1,27 +1,17 @@
import unittest
-from unittest.mock import Mock
import asyncio
-import argparse
-import aiohttp
import shutil
import os
-import yarl
from bs4 import BeautifulSoup
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+from snare.utils.asyncmock import AsyncMock
+from snare.html_handler import HtmlHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestHandleHtmlContent(unittest.TestCase):
def setUp(self):
- self.meta = {}
- run_args = argparse.ArgumentParser()
- run_args.add_argument("--tanner")
- run_args.add_argument("--page-dir")
- run_args.add_argument("--no-dorks")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
self.content = '''
@@ -34,39 +24,40 @@ def setUp(self):
self.expected_content += ' A\n \n paragraph to be tested\n \n \n\n'
self.no_dorks_content = '\n \n \n A paragraph to be tested\n'
+        self.no_dorks_content += '\n \n\n'
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.dir = self.main_page_path
+ self.return_content = None
+ no_dorks = True
+ tanner = "tanner.mushmush.org"
+ self.handler = HtmlHandler(no_dorks, tanner)
def test_handle_content(self):
- self.handler.run_args.no_dorks = False
+ self.handler.no_dorks = False
self.handler.get_dorks = AsyncMock(return_value=["test_dork1"])
async def test():
- self.return_content = await self.handler.handle_html_content(self.content)
+ self.return_content = await self.handler.handle_content(self.content)
self.loop.run_until_complete(test())
soup = BeautifulSoup(self.return_content, "html.parser")
- self.return_content = soup.decode("utf-8")
- self.assertEquals(self.return_content, self.expected_content)
+ return_content = soup.decode("utf-8")
+ self.assertEquals(return_content, self.expected_content)
def test_handle_content_no_dorks(self):
- self.handler.run_args.no_dorks = True
+ self.handler.no_dorks = True
async def test():
- self.return_content = await self.handler.handle_html_content(self.content)
+ self.return_content = await self.handler.handle_content(self.content)
self.loop.run_until_complete(test())
soup = BeautifulSoup(self.return_content, "html.parser")
self.return_content = soup.decode("utf-8")
self.assertEquals(self.return_content, self.no_dorks_content)
def test_handle_content_exception(self):
- self.handler.run_args.no_dorks = False
+ self.handler.no_dorks = False
self.handler.get_dorks = AsyncMock(return_value=[])
async def test():
- self.return_content = await self.handler.handle_html_content(None)
- with self.assertRaises(TypeError):
+ self.return_content = await self.handler.handle_content(self.content)
+ with self.assertRaises(IndexError):
self.loop.run_until_complete(test())
def tearDown(self):
diff --git a/snare/tests/test_handle_request.py b/snare/tests/test_handle_request.py
index 506fa12e..ee545ca2 100644
--- a/snare/tests/test_handle_request.py
+++ b/snare/tests/test_handle_request.py
@@ -1,33 +1,35 @@
import unittest
from unittest.mock import Mock
-from unittest.mock import call
import asyncio
import argparse
-import aiohttp
import shutil
import os
-import json
-import yarl
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import aiohttp
+from aiohttp.http_parser import RawRequestMessage
+from aiohttp import HttpVersion
+from aiohttp import web
+from yarl import URL
+from snare.server import HttpRequestHandler
+from snare.utils.asyncmock import AsyncMock
+from snare.utils.page_path_generator import generate_unique_path
class TestHandleRequest(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
- self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.server_header = "test_server"
- self.handler.run_args.slurp_enabled = True
+ args = run_args.parse_args(['--page-dir', self.page_dir])
+ uuid = ('9c10172f-7ce2-4fb4-b1c6-abc70141db56').encode('utf-8')
+ args.tanner = 'tanner.mushmush.org'
+ args.no_dorks = True
+ args.server_header = "test_server"
+ args.slurp_enabled = True
+ self.handler = HttpRequestHandler(meta, args, uuid)
self.data = {
'method': 'GET', 'path': '/',
'headers': {
@@ -37,64 +39,59 @@ def setUp(self):
'sess_uuid': 'prev_test_uuid'
}
}
+ self.loop = asyncio.new_event_loop()
self.content = ''
self.content_type = 'test_type'
- self.event_result = dict(response=dict(message=dict(detection={'type': 1}, sess_uuid="test_uuid")))
- self.request = aiohttp.protocol.RawRequestMessage(
+ event_result = dict(response=dict(message=dict(detection={'type': 1}, sess_uuid="test_uuid")))
+ RequestHandler = Mock()
+ protocol = RequestHandler()
+ message = RawRequestMessage(
method='POST', path='/', version=HttpVersion(major=1, minor=1), headers=self.data['headers'],
- raw_headers=None, should_close=None, compression=None)
- self.handler.create_data = Mock(return_value=self.data)
- self.handler.submit_data = AsyncMock(return_value=self.event_result)
+ raw_headers=None, should_close=None, compression=None, upgrade=None, chunked=None,
+ url=URL('http://test_url/')
+ )
+ self.request = web.Request(
+ message=message, payload=None, protocol=protocol, payload_writer=None,
+ task='POST', loop=self.loop
+ )
+ self.handler.tanner_handler.create_data = Mock(return_value=self.data)
+ self.handler.tanner_handler.submit_data = AsyncMock(return_value=event_result)
self.handler.submit_slurp = AsyncMock()
- self.payload = aiohttp.streams.EmptyStreamReader()
- aiohttp.Response.add_header = Mock()
- aiohttp.Response.write = Mock()
- aiohttp.Response.send_headers = Mock()
- aiohttp.Response.write_eof = AsyncMock()
+ web.Response.add_header = Mock()
+ web.Response.write = Mock()
+ web.Response.send_headers = Mock()
+ web.Response.write_eof = AsyncMock()
aiohttp.streams.EmptyStreamReader.read = AsyncMock(return_value=b'con1=test1&con2=test2')
- self.handler.parse_tanner_response = AsyncMock(
+ self.handler.tanner_handler.parse_tanner_response = AsyncMock(
return_value=(self.content, self.content_type, self.data['headers'], self.data['headers']['status']))
def test_create_request_data(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.create_data.assert_called_with(self.request, 200)
+ self.handler.tanner_handler.create_data.assert_called_with(self.request, 200)
def test_submit_request_data(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.submit_data.assert_called_with(self.data)
+ self.handler.tanner_handler.submit_data.assert_called_with(self.data)
def test_submit_request_slurp(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
self.handler.submit_slurp.assert_called_with(self.request.path)
def test_parse_response(self):
async def test():
- await self.handler.handle_request(self.request, self.payload)
- self.loop.run_until_complete(test())
- self.handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
-
- def test_handle_response(self):
- calls = [call('Host', 'test_host'), call('status', 200), call('Server', 'test_server'),
- call('Set-Cookie', 'sess_uuid=test_uuid'), call('Content-Type', 'test_type'),
- call('Content-Length', str(len(self.content)))]
-
- async def test():
- await self.handler.handle_request(self.request, self.payload)
+ await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- aiohttp.Response.add_header.assert_has_calls(calls, any_order=True)
- aiohttp.Response.send_headers.assert_called_with()
- aiohttp.Response.write.assert_called_with(self.content)
- aiohttp.Response.write_eof.assert_called_with()
+ self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
def tearDown(self):
shutil.rmtree(self.main_page_path)
diff --git a/snare/tests/test_parse_tanner_response.py b/snare/tests/test_parse_tanner_response.py
index 9578e563..6219526a 100644
--- a/snare/tests/test_parse_tanner_response.py
+++ b/snare/tests/test_parse_tanner_response.py
@@ -1,18 +1,12 @@
import unittest
-from unittest.mock import Mock
-from unittest.mock import call
import asyncio
import argparse
-import aiohttp
import shutil
import os
import json
-import yarl
-from aiohttp.protocol import HttpVersion
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
-from urllib.parse import unquote
+from snare.utils.asyncmock import AsyncMock
+from snare.utils.page_path_generator import generate_unique_path
+from snare.tanner_handler import TannerHandler
class TestParseTannerResponse(unittest.TestCase):
@@ -22,25 +16,34 @@ def setUp(self):
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.meta_content = {"/index.html": {"hash": "hash_name", "content_type": "text/html"}}
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ meta_content = {"/index.html": {"hash": "hash_name", "content_type": "text/html"}}
self.page_content = ""
self.content_type = "text/html"
with open(os.path.join(self.main_page_path, "hash_name"), 'w') as f:
f.write(self.page_content)
with open(os.path.join(self.main_page_path, "meta.json"), 'w') as f:
- json.dump(self.meta_content, f)
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
+ json.dump(meta_content, f)
+ args = run_args.parse_args(['--page-dir', page_dir])
+ args.index_page = '/index.html'
+ args.no_dorks = True
+ args.tanner = "tanner.mushmush.org"
+ uuid = "test_uuid"
+ self.handler = TannerHandler(args, meta_content, uuid)
self.requested_name = '/'
- self.loop = asyncio.new_event_loop()
- self.handler = HttpRequestHandler(self.meta_content, self.args)
- self.handler.run_args.index_page = '/index.html'
- self.handler.handle_html_content = AsyncMock(return_value=self.page_content)
+ self.loop = asyncio.get_event_loop()
+ self.handler.html_handler.handle_content = AsyncMock(return_value=self.page_content)
+ self.res1 = None
+ self.res2 = None
+ self.res3 = None
+ self.res4 = None
+ self.detection = None
+ self.expected_content = None
+ self.call_content = None
def test_parse_type_one(self):
self.detection = {"type": 1}
self.call_content = b''
- self.expected_content = self.page_content
async def test():
(self.res1, self.res2,
@@ -96,7 +99,7 @@ async def test():
(self.res1, self.res2,
self.res3, self.res4) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
self.loop.run_until_complete(test())
- self.handler.handle_html_content.assert_called_with(self.call_content)
+ self.handler.html_handler.handle_content.assert_called_with(self.call_content)
def test_parse_exception(self):
self.detection = {}
diff --git a/snare/tests/test_submit_data.py b/snare/tests/test_submit_data.py
index fa2190e4..d3e61133 100644
--- a/snare/tests/test_submit_data.py
+++ b/snare/tests/test_submit_data.py
@@ -1,27 +1,26 @@
import unittest
-from unittest.mock import Mock
import asyncio
import argparse
-import aiohttp
import shutil
import os
import json
import yarl
-from utils.asyncmock import AsyncMock
-from snare import HttpRequestHandler
-from utils.page_path_generator import generate_unique_path
+import aiohttp
+from snare.utils.asyncmock import AsyncMock
+from snare.tanner_handler import TannerHandler
+from snare.utils.page_path_generator import generate_unique_path
class TestSubmitData(unittest.TestCase):
def setUp(self):
- self.meta = {}
+ meta = {}
run_args = argparse.ArgumentParser()
run_args.add_argument("--tanner")
run_args.add_argument("--page-dir")
self.main_page_path = generate_unique_path()
os.makedirs(self.main_page_path)
- self.page_dir = self.main_page_path.rsplit('/')[-1]
- self.args = run_args.parse_args(['--page-dir', self.page_dir])
+ page_dir = self.main_page_path.rsplit('/')[-1]
+ args = run_args.parse_args(['--page-dir', page_dir])
self.loop = asyncio.new_event_loop()
self.data = {
'method': 'GET', 'path': '/',
@@ -34,10 +33,17 @@ def setUp(self):
}
}
aiohttp.ClientSession.post = AsyncMock(
- return_value=aiohttp.ClientResponse(url=yarl.URL("http://www.example.com"), method="GET")
+ return_value=aiohttp.ClientResponse(
+ url=yarl.URL("http://www.example.com"), method="GET", writer=None, continue100=1,
+ timer=None, request_info=None, traces=None, loop=self.loop,
+ session=None
+ )
)
- self.handler = HttpRequestHandler(self.meta, self.args)
- self.handler.run_args.tanner = "tanner.mushmush.org"
+ uuid = "test_uuid"
+ args.tanner = "tanner.mushmush.org"
+ args.no_dorks = True
+ self.handler = TannerHandler(args, meta, uuid)
+ self.result = None
def test_post_data(self):
aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={'type': 1}, sess_uuid="test_uuid"))
@@ -46,7 +52,7 @@ async def test():
self.result = await self.handler.submit_data(self.data)
self.loop.run_until_complete(test())
aiohttp.ClientSession.post.assert_called_with(
- 'http://tanner.mushmush.org:8090/event', data=json.dumps(self.data)
+ 'http://tanner.mushmush.org:8090/event', data=json.dumps(self.data), timeout=10.0
)
def test_event_result(self):
diff --git a/snare/tests/test_versions_manager.py b/snare/tests/test_versions_manager.py
index f634a227..280baf21 100644
--- a/snare/tests/test_versions_manager.py
+++ b/snare/tests/test_versions_manager.py
@@ -1,5 +1,5 @@
import unittest
-from versions_manager import VersionManager
+from snare.utils.snare_helpers import VersionManager
class TestVersion(unittest.TestCase):
From 93162f39e848b32f4054a98327f3ba840e5c57c4 Mon Sep 17 00:00:00 2001
From: Viswak Hanumanth
Date: Wed, 11 Jul 2018 05:52:36 +0530
Subject: [PATCH 12/13] Fix attack requests from snare-develop (#149)
* phase#1
* Fix tests
---
snare/server.py | 6 +++---
snare/tanner_handler.py | 2 +-
snare/tests/test_create_data.py | 2 +-
snare/tests/test_handle_request.py | 4 ++--
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/snare/server.py b/snare/server.py
index d9cf1cab..74c9cbbc 100644
--- a/snare/server.py
+++ b/snare/server.py
@@ -37,7 +37,7 @@ async def submit_slurp(self, data):
self.logger.error('Error submitting slurp: %s', e)
async def handle_request(self, request):
- self.logger.info('Request path: {0}'.format(request.path))
+ self.logger.info('Request path: {0}'.format(request.path_qs))
data = self.tanner_handler.create_data(request, 200)
if request.method == 'POST':
post_data = await request.post()
@@ -51,10 +51,10 @@ async def handle_request(self, request):
# Log the event to slurp service if enabled
if self.run_args.slurp_enabled:
- await self.submit_slurp(request.path)
+ await self.submit_slurp(request.path_qs)
content, content_type, headers, status_code = await self.tanner_handler.parse_tanner_response(
- request.path, event_result['response']['message']['detection'])
+ request.path_qs, event_result['response']['message']['detection'])
response_headers = multidict.CIMultiDict()
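The substance of this patch is the switch from request.path to request.path_qs: in aiohttp the former strips the query string, so injected parameters never reached TANNER. For a request like GET /index.php?id=1 the two attributes compare as follows:

    request.path       # '/index.php'       -> payload in the query string is lost
    request.path_qs    # '/index.php?id=1'  -> full path plus query is forwarded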
diff --git a/snare/tanner_handler.py b/snare/tanner_handler.py
index a6f8298f..c8810612 100644
--- a/snare/tanner_handler.py
+++ b/snare/tanner_handler.py
@@ -37,7 +37,7 @@ def create_data(self, request, response_status):
header = {key: value for (key, value) in request.headers.items()}
data['method'] = request.method
data['headers'] = header
- data['path'] = request.path
+ data['path'] = request.path_qs
if ('Cookie' in header):
data['cookies'] = {
cookie.split('=')[0]: cookie.split('=')[1] for cookie in header['Cookie'].split(';')
diff --git a/snare/tests/test_create_data.py b/snare/tests/test_create_data.py
index a07ab0f0..c89e1db7 100644
--- a/snare/tests/test_create_data.py
+++ b/snare/tests/test_create_data.py
@@ -45,7 +45,7 @@ def setUp(self):
self.response_status = "test_status"
self.data = None
self.expected_data = {
- 'method': 'POST', 'path': '/',
+ 'method': 'POST', 'path': 'http://test_url/',
'headers': {'Host': 'test_host', 'status': 200,
'Cookie': 'sess_uuid=prev_test_uuid; test_cookie=test'},
'uuid': '9c10172f-7ce2-4fb4-b1c6-abc70141db56',
diff --git a/snare/tests/test_handle_request.py b/snare/tests/test_handle_request.py
index ee545ca2..18f7590e 100644
--- a/snare/tests/test_handle_request.py
+++ b/snare/tests/test_handle_request.py
@@ -84,14 +84,14 @@ def test_submit_request_slurp(self):
async def test():
await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.submit_slurp.assert_called_with(self.request.path)
+ self.handler.submit_slurp.assert_called_with(self.request.path_qs)
def test_parse_response(self):
async def test():
await self.handler.handle_request(self.request)
self.loop.run_until_complete(test())
- self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path, {'type': 1})
+ self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {'type': 1})
def tearDown(self):
shutil.rmtree(self.main_page_path)
From 280c68f2f75bfec1978f9c13a34ec370647f9753 Mon Sep 17 00:00:00 2001
From: Evgeniia
Date: Fri, 20 Jul 2018 22:18:24 +0200
Subject: [PATCH 13/13] Update version (#152)
* update version
* 0.3.0 works with both 0.5.0 and 0.6.0
---
setup.py | 2 +-
snare/utils/snare_helpers.py | 5 +++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/setup.py b/setup.py
index 4f7b0f02..61d47187 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
from distutils.core import setup
setup(name='Snare',
- version='0.2.0',
+ version='0.3.0',
description='Super Next generation Advanced Reactive honEypot',
author='MushMush Foundation',
author_email='glastopf@public.honeynet.org',
diff --git a/snare/utils/snare_helpers.py b/snare/utils/snare_helpers.py
index ffebd940..98855847 100644
--- a/snare/utils/snare_helpers.py
+++ b/snare/utils/snare_helpers.py
@@ -11,10 +11,11 @@
class VersionManager:
def __init__(self):
- self.version = "0.2.0"
+ self.version = "0.3.0"
self.version_mapper = {
"0.1.0": ["0.1.0", "0.4.0"],
- "0.2.0": ["0.5.0", "0.5.0"]
+ "0.2.0": ["0.5.0", "0.5.0"],
+ "0.3.0": ["0.5.0", "0.6.0"]
}
def check_compatibility(self, tanner_version):
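With the widened mapping, a SNARE 0.3.0 install accepts any TANNER release from 0.5.0 through 0.6.0; a sketch of the check under the new table:

    vm = VersionManager()              # self.version == "0.3.0"
    vm.check_compatibility('0.6.0')    # inside [0.5.0, 0.6.0] -> passes
    vm.check_compatibility('0.7.0')    # above the maximum -> raises RuntimeError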