Porting code from Python 2 to 3 #13

Open · wants to merge 3 commits into master
6 changes: 5 additions & 1 deletion .travis.yml
@@ -1,5 +1,9 @@
 language: python

+python:
+- "2.7"
+- "3.6"
+
 before_install: pip install -r requirements.txt

-script: nosetests
+script: nosetests
8 changes: 5 additions & 3 deletions packtrack/__init__.py
@@ -1,6 +1,8 @@
-from correios import EncomendaRepository
-from royal import RoyalMail
-from dhl_gm import DhlGmTracker
+from __future__ import absolute_import
+
+from .correios import EncomendaRepository
+from .royal import RoyalMail
+from .dhl_gm import DhlGmTracker


 class Correios(object):
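Note on the import change above: on Python 2, "from correios import EncomendaRepository" is an implicit relative import that silently resolves to the sibling module, while Python 3 treats the same line as an absolute import and raises ImportError. A minimal sketch of the pattern, assuming it sits in a module inside the packtrack package (an explicit relative import will not run in a standalone script):

    # Minimal sketch, assuming this lives in a module inside the packtrack
    # package (e.g. packtrack/__init__.py).
    from __future__ import absolute_import  # make Python 2 resolve bare names like Python 3

    # Python 2 only -- implicit relative import, ImportError on Python 3:
    #   from correios import EncomendaRepository
    # Works on both 2.7 and 3.x -- explicit relative import:
    from .correios import EncomendaRepository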
8 changes: 5 additions & 3 deletions packtrack/correios.py
@@ -1,4 +1,6 @@
 # coding: utf-8
+from __future__ import absolute_import
+
 from datetime import datetime
 import re

@@ -15,7 +17,7 @@ def get(self, numero, auth=None):
         return func(numero, **kwargs)

     def _init_scraper(self, backend):
-        from scraping import CorreiosWebsiteScraper, CorreiosRastroService
+        from .scraping import CorreiosWebsiteScraper, CorreiosRastroService
         if backend is None:
             backend = 'www2'

@@ -36,8 +38,8 @@ def __init__(self, numero):
     def adicionar_status(self, status):
         d = datetime
         self.status.append(status)
-        t_format = self.validar_data(status.data)
-        self.status.sort(lambda x, y: 1 if d.strptime(x.data, t_format) > d.strptime(y.data, t_format) else -1)
+        self.status.sort(
+            key=lambda x: d.strptime(x.data, self.validar_data(x.data)))

     def validar_data(self, data):
         if re.match('^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$', data):
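The sort() rewrite above is needed because Python 3 dropped the cmp argument of list.sort(); only a key function is accepted. A small standalone sketch of the same transformation, using made-up date strings instead of the PR's Status objects:

    from datetime import datetime
    from functools import cmp_to_key  # available on Python 2.7 and 3.x

    datas = ['02/01/2019 10:00', '01/01/2019 09:30']  # hypothetical sample data
    fmt = '%d/%m/%Y %H:%M'

    # Python 2 style: comparator passed positionally -- TypeError on Python 3.
    #   datas.sort(lambda x, y: 1 if datetime.strptime(x, fmt) > datetime.strptime(y, fmt) else -1)

    # Python 3 style, as in the diff: sort by a key function.
    datas.sort(key=lambda x: datetime.strptime(x, fmt))

    # If an old comparator cannot easily be rewritten, wrap it instead.
    compare = lambda x, y: 1 if datetime.strptime(x, fmt) > datetime.strptime(y, fmt) else -1
    datas.sort(key=cmp_to_key(compare))

    print(datas)  # ['01/01/2019 09:30', '02/01/2019 10:00']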
32 changes: 14 additions & 18 deletions packtrack/scraping.py
@@ -1,15 +1,16 @@
+from __future__ import absolute_import
+
 import os
 import re
-from HTMLParser import HTMLParser

-from BeautifulSoup import BeautifulSoup
+from bs4 import BeautifulSoup
 import requests
 from requests.exceptions import RequestException
 from zeep import Client as Zeep
 from zeep.cache import InMemoryCache
 from zeep.transports import Transport

-from correios import Encomenda, Status
+from .correios import Encomenda, Status


 class CorreiosWebsiteScraper(object):
@@ -49,46 +50,41 @@ def get_encomenda_info(self, numero):
             html = response.content

         if html:
-            try:
-                html = html.decode('latin-1')
-            except UnicodeDecodeError:
-                pass
             encomenda = Encomenda(numero)
             for status in self._get_all_status_from_html(html):
                 encomenda.adicionar_status(status)
             return encomenda

     def _text(self, value):
-        value = BeautifulSoup(value.strip()).text
-        return value.replace('&nbsp;', ' ')
+        value = BeautifulSoup(value.strip(), 'lxml').text
+        return value.replace('&nbsp;', ' ').replace(u'\xa0', ' ')

     def _get_all_status_from_html(self, html):
         status = []
-        html_parser = HTMLParser()
-        if "<table" not in html:
+        if '<table' not in html:
             return status
         html_info = re.search('.*(<table.*</table>).*', html, re.S)
         if not html_info:
             return status

         table = html_info.group(1)
-        soup = BeautifulSoup(table)
+        soup = BeautifulSoup(table, 'lxml')

         for tr in soup.table:
             try:
                 tds = tr.findAll('td')
             except AttributeError:
                 continue
             for td in tds:
-                content = td.renderContents().replace('\r', ' ') \
-                    .split('<br />')
-                class_ = td['class']
+                content = td.renderContents().replace(b'\r', b' ') \
+                    .split(b'<br/>')
+                class_ = td['class'][0]
                 if class_ == 'sroDtEvent':
-                    data = '%s %s' % (content[0].strip(), content[1].strip())
+                    data = '%s %s' % (content[0].strip().decode(), content[1].strip().decode())
                     local = '/'.join(self._text(content[2]).rsplit(' / ', 1)).upper()
                 elif class_ == 'sroLbEvent':
-                    situacao = html_parser.unescape(self._text(content[0]))
-                    detalhes = html_parser.unescape(self._text(content[1]))
+                    situacao = self._text(content[0].decode('utf-8'))
+                    detalhes = self._text(content[1].decode('utf-8'))
                     if detalhes:
                         detalhes = u'%s %s' % (situacao, detalhes)
                     status.append(Status(data=data, local=local,
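Most of the edits above handle Python 3's bytes/str split under beautifulsoup4: Tag.renderContents() now returns bytes, td['class'] is a list of classes rather than a string, and byte chunks must be decoded before being mixed into text. A small Python 3 sketch of the same handling, assuming beautifulsoup4 and lxml are installed (per requirements.txt) and using a made-up table row:

    from bs4 import BeautifulSoup

    html = ('<table><tr>'
            '<td class="sroDtEvent">28/01/2009<br/>17:49<br/>CURITIBA / PR</td>'
            '</tr></table>')
    td = BeautifulSoup(html, 'lxml').find('td')

    # renderContents() returns bytes in beautifulsoup4, so the separators must
    # be bytes literals too; in the old BeautifulSoup 3 they were plain str.
    content = td.renderContents().replace(b'\r', b' ').split(b'<br/>')

    # Multi-valued attributes such as class come back as a list in bs4.
    class_ = td['class'][0]

    # Decode the byte chunks before formatting them into text.
    data = '%s %s' % (content[0].strip().decode(), content[1].strip().decode())
    print(class_, data)  # sroDtEvent 28/01/2009 17:49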
9 changes: 3 additions & 6 deletions requirements.txt
@@ -1,8 +1,5 @@
-BeautifulSoup==3.2.1
-argparse==1.2.1
-beautifulsoup4==4.3.2
-lxml==2.3.5
-mockito==0.5.1
+beautifulsoup4==4.6.0
+lxml==3.8.0
+mockito==1.0.12
 requests==2.18.1
-wsgiref==0.1.2
 zeep==1.6.0
6 changes: 3 additions & 3 deletions tests/correios_api_test.py
@@ -1,14 +1,14 @@
 import unittest

-from mockito import when, Mock
+from mockito import when, mock

 from packtrack import Correios


 class CorreiosTest(unittest.TestCase):

     def test_should_use_repository_to_get_encomenda(self):
-        encomenda_repository_mock = Mock()
+        encomenda_repository_mock = mock()
         when(encomenda_repository_mock).get('123', auth=None) \
             .thenReturn('encomenda123')

@@ -18,7 +18,7 @@ def test_should_use_repository_to_get_encomenda(self):

     def test_service_should_receive_auth(self):
         auth = ('mi', 'mimi')
-        encomenda_repository_mock = Mock()
+        encomenda_repository_mock = mock()
         when(encomenda_repository_mock).get('123', auth=auth) \
             .thenReturn('encomenda123')

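The tests switch from Mock() to mock() because the mockito release pinned in requirements.txt (1.0.12) exposes the test double factory as lowercase mock(). A minimal usage sketch with a hypothetical repository double:

    from mockito import mock, when, verify

    repo = mock()
    when(repo).get('123', auth=None).thenReturn('encomenda123')

    assert repo.get('123', auth=None) == 'encomenda123'
    verify(repo).get('123', auth=None)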
2 changes: 1 addition & 1 deletion tests/correios_test.py
@@ -9,7 +9,7 @@ class EncomendaRepositoryTest(unittest.TestCase):
     def test_should_get_encomenda_by_numero(self):
         encomenda_123 = Status(data='2009-01-28 17:49:00')

-        correios_website_scraper_mock = Mock()
+        correios_website_scraper_mock = mock()
         when(correios_website_scraper_mock).get_encomenda_info('123', auth=None).thenReturn(encomenda_123)

         repository = EncomendaRepository()
5 changes: 3 additions & 2 deletions tests/scraping_test.py
@@ -1,7 +1,7 @@
 # encoding: UTF-8
 import os
 import unittest
-
+import io
 import mock

 from packtrack.scraping import CorreiosWebsiteScraper

@@ -17,7 +17,8 @@ def _assert_status(self, status, data, local, situacao, detalhes):
         self.assertEqual(detalhes, status.detalhes)

     def test_should_get_data_from_correios_website(self):
-        example_file = open('%s/tests/correios_website/exemplo_rastreamento_correios1.html' % os.getcwd())
+        example_file = io.open('%s/tests/correios_website/exemplo_rastreamento_correios1.html' % os.getcwd(),
+                               encoding='iso-8859-1')
         sample_html = example_file.read()
         example_file.close()

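io.open() with an explicit encoding behaves the same on Python 2.7 and 3.x: the fixture is read as decoded text rather than bytes, which matters here because the sample HTML is Latin-1 encoded. A short sketch with a hypothetical file name:

    import io

    # Built-in open() on Python 2 returns byte strings and accepts no encoding;
    # io.open() decodes to unicode text on both 2.7 and 3.x (on Python 3 it is
    # simply the built-in open).
    with io.open('exemplo_rastreamento.html', encoding='iso-8859-1') as f:  # hypothetical path
        sample_html = f.read()  # unicode on 2.7, str on 3.x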