Skip to content

Commit

Permalink
re-structured project a bit, added nodrop option to proxy_pass
Browse files Browse the repository at this point in the history
  • Loading branch information
mgeeky committed Dec 16, 2020
1 parent c0c2cd8 commit bde7d10
Show file tree
Hide file tree
Showing 10 changed files with 880 additions and 756 deletions.
13 changes: 10 additions & 3 deletions example-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -149,16 +149,23 @@ action_url:
#
# Syntax:
# proxy_pass:
# - /url_to_be_passed example.com
# - /url_to_be_passed example.com [option1,option2=value2]
#
# The first parameter 'url' is a regex (case-insensitive). Must start with '/'.
# The begin/end regex operands are implicit and will constitute following regex with URL:
# '^' + url + '$'
# The regex begin/end operators are implied, and the following regex will be
# matched against the inbound request's URL:
# '^/' + url_to_be_passed + '$'
#
# Following options are supported:
# - nodrop - Process this rule first, before evaluating any DROP logic.
#            Ensures that requests with a matching URL are proxy-passed no matter what.
#
# Default: No proxy pass rules.
#
proxy_pass:
- /foobar\d* bing.com
- /alwayspass google.com nodrop


#
# If set, removes all HTTP headers sent by Client that are not expected by Teamserver according
Expand Down
Empty file added lib/__init__.py
Empty file.
117 changes: 80 additions & 37 deletions ipLookupHelper.py → lib/ipLookupHelper.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,17 +35,19 @@
import threading
import requests
import urllib3

from urllib.parse import urlparse, parse_qsl
from subprocess import Popen, PIPE
from proxylogger import ProxyLogger
from pluginsloader import PluginsLoader
from sslintercept import SSLInterception
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
import plugins.IProxyPlugin
from io import StringIO, BytesIO
from html.parser import HTMLParser

import lib.plugins.IProxyPlugin
from lib.proxylogger import ProxyLogger
from lib.pluginsloader import PluginsLoader
from lib.sslintercept import SSLInterception

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
ssl._create_default_https_context = ssl._create_unverified_context

Expand Down Expand Up @@ -94,54 +96,49 @@ class IPLookupHelper:

cached_lookups_file = 'ip-lookups-cache.json'

def __init__(self, apiKeys):
def __init__(self, logger, apiKeys):
self.logger = logger
self.apiKeys = {
'ip_api_com': 'this-provider-not-requires-api-key-for-free-plan',
'ipapi_co': 'this-provider-not-requires-api-key-for-free-plan',
}

self.httpHeaders = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit (KHTML, like Gecko) Chrome/87',
'Accept': 'text/json, */*',
'Host': '',
}

if len(apiKeys) > 0:
for prov in IPLookupHelper.supported_providers:
if prov in apiKeys.keys():
if len(apiKeys[prov].strip()) < 2: continue
if apiKeys[prov] == None or len(apiKeys[prov].strip()) < 2: continue
self.apiKeys[prov] = apiKeys[prov].strip()

self.cachedLookups = {}

Logger.dbg('Following IP Lookup providers will be used: ' + str(list(self.apiKeys.keys())))
self.logger.dbg('Following IP Lookup providers will be used: ' + str(list(self.apiKeys.keys())))

try:
with open(IPLookupHelper.cached_lookups_file) as f:
data = f.read()
if len(data) > 0:
cached = json.loads(data)
self.cachedLookups = cached
Logger.dbg(f'Read {len(cached)} cached entries from file.')
self.logger.dbg(f'Read {len(cached)} cached entries from file.')

except json.decoder.JSONDecodeError as e:
Logger.err(f'Corrupted JSON data in cache file: {IPLookupHelper.cached_lookups_file}! Error: {e}')
self.logger.err(f'Corrupted JSON data in cache file: {IPLookupHelper.cached_lookups_file}! Error: {e}')
raise

except FileNotFoundError as e:
with open(IPLookupHelper.cached_lookups_file, 'w') as f:
json.dump({}, f)

except Exception as e:
Logger.err(f'Exception raised while loading cached lookups from file ({IPLookupHelper.cached_lookups_file}: {e}')
self.logger.err(f'Exception raised while loading cached lookups from file ({IPLookupHelper.cached_lookups_file}: {e}')
raise

def lookup(self, ipAddress):
if len(self.apiKeys) == 0:
return
return {}

if ipAddress in self.cachedLookups.keys():
Logger.dbg(f'Returning cached entry for IP address: {ipAddress}')
self.logger.dbg(f'Returning cached entry for IP address: {ipAddress}')
return self.cachedLookups[ipAddress]

leftProvs = list(self.apiKeys.keys())
Expand All @@ -152,7 +149,7 @@ def lookup(self, ipAddress):

if hasattr(self, prov) != None:
method = getattr(self, prov)
Logger.dbg(f'Calling IP Lookup provider: {prov}')
self.logger.dbg(f'Calling IP Lookup provider: {prov}')
result = method(ipAddress)

if len(result) > 0:
Expand All @@ -167,7 +164,7 @@ def lookup(self, ipAddress):
with open(IPLookupHelper.cached_lookups_file, 'w') as f:
json.dump(self.cachedLookups, f)

Logger.dbg(f'New IP lookup entry cached: {ipAddress}')
self.logger.dbg(f'New IP lookup entry cached: {ipAddress}')

return result

Expand Down Expand Up @@ -282,7 +279,7 @@ def update(out, data, keydst, keysrc):
return output

def ip_api_com(self, ipAddress):
# $ curl -s ip-api.com/json/89.167.131.40
# $ curl -s ip-api.com/json/89.167.131.40 [21:05]
# {
# "status": "success",
# "country": "Germany",
Expand All @@ -301,17 +298,15 @@ def ip_api_com(self, ipAddress):
# }

try:
self.httpHeaders['Host'] = 'ip-api.com'
r = requests.get(f'http://ip-api.com/json/{ipAddress}',
headers = self.httpHeaders)
r = requests.get(f'http://ip-api.com/json/{ipAddress}')

if r.status_code != 200:
raise Exception(f'ip-api.com returned unexpected status code: {r.status_code}.\nOutput text:\n' + r.json())

return r.json()

except Exception as e:
Logger.err(f'Exception catched while querying ip-api.com with {ipAddress}:\nName: {e}')
self.logger.err(f'Exception catched while querying ip-api.com with {ipAddress}:\nName: {e}', color='cyan')

return {}

Expand Down Expand Up @@ -346,17 +341,15 @@ def ipapi_co(self, ipAddress):
# }

try:
self.httpHeaders['Host'] = 'ipapi.co'
r = requests.get(f'https://ipapi.co/{ipAddress}/json/',
headers = self.httpHeaders)
r = requests.get(f'https://ipapi.co/{ipAddress}/json/')

if r.status_code != 200:
raise Exception(f'ipapi.co returned unexpected status code: {r.status_code}.\nOutput text:\n' + r.json())

return r.json()

except Exception as e:
Logger.err(f'Exception catched while querying ipapi.co with {ipAddress}:\nName: {e}')
self.logger.err(f'Exception catched while querying ipapi.co with {ipAddress}:\nName: {e}', color='cyan')

return {}

Expand Down Expand Up @@ -400,17 +393,15 @@ def ipgeolocation_io(self, ipAddress):
# }
# }
try:
self.httpHeaders['Host'] = 'api.ipgeolocation.io'
r = requests.get(f'https://api.ipgeolocation.io/ipgeo?apiKey={self.apiKeys["ipgeolocation_io"]}&ip={ipAddress}',
headers = self.httpHeaders)
r = requests.get(f'https://api.ipgeolocation.io/ipgeo?apiKey={self.apiKeys["ipgeolocation_io"]}&ip={ipAddress}')

if r.status_code != 200:
raise Exception(f'ipapi.co returned unexpected status code: {r.status_code}.\nOutput text:\n' + r.json())

return r.json()

except Exception as e:
Logger.err(f'Exception catched while querying ipapi.co with {ipAddress}:\nName: {e}')
self.logger.err(f'Exception catched while querying ipapi.co with {ipAddress}:\nName: {e}', color='cyan')

return {}

Expand All @@ -425,7 +416,8 @@ class IPGeolocationDeterminant:
'timezone'
)

def __init__(self, determinants):
def __init__(self, logger, determinants):
self.logger = logger
if type(determinants) != dict:
raise Exception('Specified ip_geolocation_requirements must be a valid dictonary!')

Expand Down Expand Up @@ -466,25 +458,76 @@ def determine(self, ipLookupResult):

for exp in expected:
if georesult in exp.lower():
Logger.dbg(f'IP Geo result {determinant} value "{georesult}" met expected value "{exp}"')
self.logger.dbg(f'IP Geo result {determinant} value "{georesult}" met expected value "{exp}"')
matched = True
break

m = re.search(exp, georesult, re.I)
if m:
Logger.dbg(f'IP Geo result {determinant} value "{georesult}" met expected regular expression: ({exp})')
self.logger.dbg(f'IP Geo result {determinant} value "{georesult}" met expected regular expression: ({exp})')
matched = True
break

if matched:
break

if not matched:
Logger.dbg(f'IP Geo result {determinant} values {ipLookupResult[determinant]} DID NOT met expected set {expected}')
self.logger.dbg(f'IP Geo result {determinant} values {ipLookupResult[determinant]} DID NOT met expected set {expected}')
result = False

return result

@staticmethod
def getValues(v, n = 0):
values = []

if type(v) == str:
if ' ' in v:
values.extend(v.split(' '))
values.append(v)
elif type(v) == int or type(v) == float:
values.extend([str(v)])
elif type(v) == tuple or type(v) == list:
for w in v:
values.extend(IPGeolocationDeterminant.getValues(w, n+1))
elif type(v) == dict and n < 10:
values.extend(IPGeolocationDeterminant.getValuesDict(v, n+1))

return values

@staticmethod
def getValuesDict(data, n = 0):
values = []

for k, v in data.items():
if type(v) == dict and n < 10:
values.extend(IPGeolocationDeterminant.getValuesDict(v, n+1))
elif n < 10:
values.extend(IPGeolocationDeterminant.getValues(v, n+1))

return values

def validateIpGeoMetadata(self, ipLookupDetails):
if len(ipLookupDetails) == 0: return (True, '')

words = set(list(filter(None, IPGeolocationDeterminant.getValuesDict(ipLookupDetails))))
if len(words) == 0: return (True, '')

self.logger.dbg(f"Extracted keywords from Peer's IP Geolocation metadata: ({words})")

for w in words:
for x in BANNED_AGENTS:
if ((' ' in x) and (x.lower() in w.lower())):
self.logger.dbg(f"Peer's IP Geolocation metadata contained banned phrase: ({w})")
return (False, w)

elif (w.lower() == x.lower()):
self.logger.dbg(f"Peer's IP Geolocation metadata contained banned keyword: ({w})")
return (False, w)

self.logger.dbg(f"Peer's IP Geolocation metadata didn't raise any suspicion.")
return (True, '')

def main(argv):
if len(argv) < 2:
print ('''
Expand Down
Loading

0 comments on commit bde7d10

Please sign in to comment.