Hi I tried to fix the repo to run on python3.7 #80

Open · wants to merge 3 commits into base: master
156 changes: 78 additions & 78 deletions Common/TaskController.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Helpers/CanarioAPI.py
@@ -44,7 +44,7 @@ def retrieve(s, url, data=None, post=False):

     # 'data' must be in the form of a dictionary
     def build_url(s, data):
-        d = ['%s=%s' % (x, y) for x, y in data.iteritems()]
+        d = ['%s=%s' % (x, y) for x, y in list(data.items())]
         return '%s&%s' % (s.url, '&'.join(d))

     # Does a search--whee. Bangs can be specified via separate argument. This is due to plan to make changes to the search for API users
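The `iteritems()` → `items()` swap above is the standard 2to3 rewrite: Python 3 removed `dict.iteritems()`, and `dict.items()` now returns a view. The `list()` wrapper 2to3 inserts is only needed when a real list is required or the dict is mutated mid-loop; for a plain iteration the view works directly. A minimal standalone sketch (the sample dict is illustrative, not from the repo):

# Sample data, purely for illustration.
data = {'q': 'example.com', 'limit': '10'}

# 2to3's conservative rewrite: materialize the view as a list first.
d = ['%s=%s' % (x, y) for x, y in list(data.items())]

# Equivalent and more idiomatic in Python 3: iterate the view directly.
d = ['%s=%s' % (x, y) for x, y in data.items()]

print('&'.join(d))  # q=example.com&limit=10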
24 changes: 12 additions & 12 deletions Helpers/Connect6.py
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
-import helpers
+from . import helpers
 import requests
 import configparser
-import urlparse
+import urllib.parse
 import logging
-from BeautifulSoup import BeautifulSoup
+from bs4 import BeautifulSoup


 class Connect6Scraper(object):
@@ -24,7 +24,7 @@ def __init__(self, domain, Verbose=False):
             self.FinalAnswer = ''
             self.verbose = Verbose
         except Exception as e:
-            print e
+            print(e)

     '''
     Try to find the connect6 url for the domain
@@ -44,14 +44,14 @@ def Connect6AutoUrl(self):
         except Exception as e:
             error = "[!] Major issue with Google Search: for Connect6 URL" + \
                 str(e)
-            print helpers.color(error, warning=True)
+            print((helpers.color(error, warning=True)))
         try:
             rawhtml = r.content
             soup = BeautifulSoup(rawhtml)
             for a in soup.findAll('a', href=True):
                 try:
-                    l = urlparse.parse_qs(
-                        urlparse.urlparse(a['href']).query)['q']
+                    l = urllib.parse.parse_qs(
+                        urllib.parse.urlparse(a['href']).query)['q']
                     if 'site:connect6.com' not in l[0]:
                         l = l[0].split(":")
                         urllist.append(l[2])
@@ -67,7 +67,7 @@ def Connect6AutoUrl(self):
                        y += 1
            return urllist
        except Exception as e:
-            print e
+            print(e)
            return urllist

     def Connect6Download(self, url):
@@ -82,12 +82,12 @@ def Connect6Download(self, url):
                 url = 'http://' + str(url)
             if self.verbose:
                 p = " [*] Now downloading Connect6 Source: " + str(url)
-                print helpers.color(p, firewall=True)
+                print((helpers.color(p, firewall=True)))
             r = requests.get(url, headers=self.UserAgent)
         except Exception as e:
             error = " [!] Major issue with Downloading Connect6 source:" + \
                 str(e)
-            print helpers.color(error, warning=True)
+            print((helpers.color(error, warning=True)))
         try:
             if r:
                 rawhtml = r.content
@@ -99,13 +99,13 @@ def Connect6Download(self, url):
                     if self.verbose:
                         p = " [*] Connect6 Name Found: " + \
                             str(litag.text)
-                        print helpers.color(p, firewall=True)
+                        print((helpers.color(p, firewall=True)))
                 except:
                     pass
             return NameList
         # for a in soup.findAll('a', href=True):
         except Exception as e:
-            print e
+            print(e)

     def Connect6ParseName(self, raw):
         '''
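Two renames dominate this file: Python 2's `urlparse` module moved to `urllib.parse`, and BeautifulSoup is now imported from the `bs4` package. The doubled parentheses in `print((helpers.color(...)))` are harmless 2to3 residue and behave identically to a single pair. A standalone sketch of the `urllib.parse` calls used above (the href value is made up for illustration):

import urllib.parse

# A Google result link of the shape the scraper walks (illustrative only).
href = '/url?q=https://connect6.com/people/example&sa=U'

# Python 2: urlparse.urlparse / urlparse.parse_qs
# Python 3: the same functions, now under urllib.parse
query = urllib.parse.urlparse(href).query
params = urllib.parse.parse_qs(query)
print(params['q'])  # ['https://connect6.com/people/example']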
10 changes: 5 additions & 5 deletions Helpers/Converter.py
@@ -8,7 +8,7 @@
 from pdfminer.converter import TextConverter
 from pdfminer.layout import LAParams
 from pdfminer.pdfpage import PDFPage
-from cStringIO import StringIO
+from io import StringIO


 class Converter(object):
@@ -18,7 +18,7 @@ def __init__(self, verbose=False):
             self.logger = logging.getLogger("SimplyEmail.Converter")
             self.verbose = verbose
         except Exception as e:
-            print e
+            print(e)

     def convert_docx_to_txt(self, path):
         """
@@ -32,7 +32,7 @@ def convert_docx_to_txt(self, path):
         try:
             text = docx2txt.process(path)
             self.logger.debug("Converted docx to text: " + str(path))
-            return unicode(text)
+            return str(text)
         except Exception as e:
             text = ""
             return text
@@ -144,14 +144,14 @@ def convert_zip_to_text(self, path, rawtext=True):
                     try:
                         text += str(a[x])
                     except Exception as e:
-                        print e
+                        print(e)
                     # pass
                 self.logger.debug("Unzip of file complted (raw text): " + str(path))
                 return text
             else:
                 return {name: input_zip.read(name) for name in input_zip.namelist()}
         except Exception as e:
-            print e
+            print(e)
             text = ""
             return text
         self.logger.error(
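`cStringIO` is gone in Python 3: `io.StringIO` holds text (`str`) and `io.BytesIO` holds bytes, and under Python 3 pdfminer's `TextConverter` emits decoded text, so a `StringIO` output buffer fits here. The `unicode(text)` → `str(text)` change follows the same split, since Python 3's `str` is the old `unicode`. A quick standalone sketch of the two buffer types (sample data is illustrative):

from io import BytesIO, StringIO

text_buf = StringIO()            # accepts str, like the old cStringIO for text
text_buf.write('extracted page text')
print(text_buf.getvalue())

byte_buf = BytesIO(b'%PDF-1.4')  # accepts bytes, e.g. raw PDF content
print(byte_buf.read())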
30 changes: 15 additions & 15 deletions Helpers/Download.py
@@ -2,9 +2,9 @@
 import requests
 import os
 import configparser
-import helpers
+from . import helpers
 import logging
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import time
 from bs4 import BeautifulSoup
 from random import randint
@@ -21,7 +21,7 @@ def __init__(self, verbose=False):
             self.UserAgent = {
                 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
         except Exception as e:
-            print e
+            print(e)

     def download_file(self, url, filetype, maxfile=100, verify=True):
         """
@@ -56,7 +56,7 @@ def download_file(self, url, filetype, maxfile=100, verify=True):
         except Exception as e:
             if self.verbose:
                 p = ' [*] Download of file failed: ' + e
-                print helpers.color(p, firewall=True)
+                print((helpers.color(p, firewall=True)))
             self.logger.error("Failed to download file: " + str(url) + ' error: ' + str(e))
         download = os.path.isfile(local_filename)
         return local_filename, download
@@ -75,19 +75,19 @@ def download_file2(self, url, filetype, timeout=10):
                 url = 'http://' + str(url)
             try:
                 self.logger.debug("Download2 started download: " + str(url))
-                response = urllib2.urlopen(url, timeout=timeout)
+                response = urllib.request.urlopen(url, timeout=timeout)
                 data = response.read()
                 download = os.path.isfile(local_filename)
-            except urllib2.HTTPError, e:
+            except urllib.error.HTTPError as e:
                 self.logger.debug('urllib2 HTTPError: ' + e)
-            except urllib2.URLError, e:
+            except urllib.error.URLError as e:
                 self.logger.debug('urllib2 URLError: ' + e)
-            except urllib2.HTTPException, e:
+            except urllib2.HTTPException as e:
                 self.logger.debug('urllib2 HTTPException: ' + e)
             except Exception as e:
                 if self.verbose:
                     p = ' [*] Download2 of file failed: ' + e
-                    print helpers.color(p, firewall=True)
+                    print((helpers.color(p, firewall=True)))
                 self.logger.error("Failed to download2 file: " + str(e))
             try:
                 with open(local_filename, 'wb+') as f:
@@ -108,18 +108,18 @@ def delete_file(self, local_filename):
             else:
                 if self.verbose:
                     p = ' [*] File not found to remove : ' + local_filename
-                    print helpers.color(p, firewall=True)
+                    print((helpers.color(p, firewall=True)))
         except Exception as e:
             self.logger.error("Failed to delete file: " + str(e))
             if self.verbose:
-                print e
+                print(e)

     def GoogleCaptchaDetection(self, RawHtml):
         soup = BeautifulSoup(RawHtml, "lxml")
         if "Our systems have detected unusual traffic" in soup.text:
             p = " [!] Google Captcha was detected! (For best results resolve/restart -- Increase sleep/jitter in SimplyEmail.ini)"
             self.logger.warning("Google Captcha was detected!")
-            print helpers.color(p, warning=True)
+            print((helpers.color(p, warning=True)))
             return True
         else:
             return False
@@ -147,7 +147,7 @@ def requesturl(self, url, useragent, timeout=10, retrytime=5, statuscode=False,
             if self.verbose:
                 p = ' [!] Request for url timed out, retrying: ' + url
                 self.logger.info('Request timed out, retrying: ' + url)
-                print helpers.color(p, firewall=True)
+                print((helpers.color(p, firewall=True)))
             r = requests.get(url, headers=self.UserAgent, timeout=retrytime, verify=verify)
             rawhtml = r.content
         except requests.exceptions.TooManyRedirects:
@@ -156,14 +156,14 @@ def requesturl(self, url, useragent, timeout=10, retrytime=5, statuscode=False,
                 p = ' [!] Request for url resulted in bad url: ' + url
                 self.logger.error(
                     'Request for url resulted in bad url: ' + url)
-                print helpers.color(p, warning=True)
+                print((helpers.color(p, warning=True)))
         except requests.exceptions.RequestException as e:
             # catastrophic error. bail.
             if self.verbose:
                 p = ' [!] Request for url resulted in major error: ' + str(e)
                 self.logger.critical(
                     'Request for url resulted in major error: ' + str(e))
-                print helpers.color(p, warning=True)
+                print((helpers.color(p, warning=True)))
         except Exception as e:
             p = ' [!] Request for url resulted in unhandled error: ' + str(e)
             self.logger.critical(
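The exception-handling hunk above deserves a caution. The `except X, e:` → `except X as e:` syntax fix is correct, but the converted `except urllib2.HTTPException as e:` line still references the removed `urllib2` module (that class lives at `http.client.HTTPException` in Python 3), and `'...' + e` raises a `TypeError` because exceptions don't concatenate with strings. A hedged standalone sketch of how the same handlers could look in pure Python 3 (the function name and messages are illustrative, not from the PR):

import http.client
import urllib.error
import urllib.request

def fetch(url, timeout=10):
    try:
        with urllib.request.urlopen(url, timeout=timeout) as response:
            return response.read()
    except urllib.error.HTTPError as e:      # subclass of URLError, so listed first
        print('HTTP error: ' + str(e))       # str(e), not e, for concatenation
    except urllib.error.URLError as e:
        print('URL error: ' + str(e))
    except http.client.HTTPException as e:   # Python 3 home of HTTPException
        print('HTTP exception: ' + str(e))
    return None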