Skip to content

Commit ab3f3f1

Browse files
Merge pull request #151 from pkubiak/feature-search-rect-via-api
Feature search_rect via API
2 parents c21ff39 + 52cdbdb commit ab3f3f1

11 files changed

+3083
-23
lines changed

pycaching/cache.py

Lines changed: 63 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -153,6 +153,43 @@ def _from_print_page(cls, geocaching, guid, soup):
153153
cache_info["log_counts"] = Cache._get_log_counts_from_print_page(soup)
154154
return Cache(geocaching, **cache_info)
155155

156+
@classmethod
def _from_api_record(cls, geocaching, record):
    """Create a cache instance from a JSON record returned by API.

    :param geocaching: Geocaching session the new cache is bound to.
    :param record: One JSON result object from the API search endpoint.
    :return: A :class:`Cache` populated from the record.
    """
    # Map API record fields onto Cache constructor keywords.
    attributes = {
        "wp": record["code"],
        "name": record["name"],
        "type": Type.from_number(record["geocacheType"]),
        "state": Status(record["cacheStatus"]) == Status.enabled,
        "found": record["userFound"],
        "size": Size.from_number(record["containerType"]),
        "difficulty": record["difficulty"],
        "terrain": record["terrain"],
        "author": record["owner"]["username"],
        # Keep only the date part of an ISO timestamp such as "2020-01-31T00:00:00".
        "hidden": record["placedDate"].split("T")[0],
        "favorites": record["favoritePoints"],
        "pm_only": record["premiumOnly"],
    }
    # Not consumed attributes:
    # detailsUrl, hasGeotour, hasLogDraft, id, lastFoundDate,
    # owner.code, userDidNotFind
    cache = Cache(geocaching, **attributes)

    # NOTE: Basic Members have no access to postedCoordinates of Premium-only caches
    if "postedCoordinates" in record:
        coordinates = record["postedCoordinates"]
        cache.location = Point(coordinates["latitude"], coordinates["longitude"])

    return cache
192+
156193
def __init__(self, geocaching, wp, **kwargs):
157194
"""Create a cache instance.
158195
@@ -827,7 +864,7 @@ def load_by_guid(self):
827864
type_img = os.path.basename(content.find("img").get("src"))
828865
self.type = Type.from_filename(os.path.splitext(type_img)[0])
829866

830-
size_img = content.find("img", src=re.compile("\/icons\/container\/"))
867+
size_img = content.find("img", src=re.compile(r"\/icons\/container\/"))
831868
self.size = Size.from_string(size_img.get("alt").split(": ")[1])
832869

833870
D_and_T_img = content.find("p", "Meta DiffTerr").find_all("img")
@@ -843,7 +880,7 @@ def load_by_guid(self):
843880
hidden_p = content.find("p", text=re.compile("Placed Date:"))
844881
self.hidden = hidden_p.text.replace("Placed Date:", "").strip()
845882

846-
attr_img = content.find_all("img", src=re.compile("\/attributes\/"))
883+
attr_img = content.find_all("img", src=re.compile(r"\/attributes\/"))
847884
attributes_raw = [
848885
os.path.basename(_.get("src")).rsplit("-", 1) for _ in attr_img
849886
]
@@ -1004,12 +1041,12 @@ def load_logbook(self, limit=float("inf")):
10041041
img_filename = log_data["LogTypeImage"].rsplit(".", 1)[0] # filename w/o extension
10051042

10061043
# create and fill log object
1007-
l = Log()
1008-
l.type = LogType.from_filename(img_filename)
1009-
l.text = log_data["LogText"]
1010-
l.visited = log_data["Visited"]
1011-
l.author = log_data["UserName"]
1012-
yield l
1044+
log = Log()
1045+
log.type = LogType.from_filename(img_filename)
1046+
log.text = log_data["LogText"]
1047+
log.visited = log_data["Visited"]
1048+
log.author = log_data["UserName"]
1049+
yield log
10131050

10141051
# TODO: trackable list can have multiple pages - handle it in similar way as _logbook_get_page
10151052
# for example see: http://www.geocaching.com/geocache/GC26737_geocaching-jinak-tb-gc-hrbitov
@@ -1034,7 +1071,7 @@ def load_trackables(self, limit=float("inf")):
10341071
# filter out all urls for trackables
10351072
urls = [link.get("href") for link in links if "track" in link.get("href")]
10361073
# find the names matching the trackble urls
1037-
names = [re.split("[\<\>]", str(link))[2] for link in links if "track" in link.get("href")]
1074+
names = [re.split(r"[\<\>]", str(link))[2] for link in links if "track" in link.get("href")]
10381075

10391076
for name, url in zip(names, urls):
10401077

@@ -1280,6 +1317,10 @@ def from_string(cls, name):
12801317
except KeyError as e:
12811318
raise errors.ValueError("Unknown cache type '{}'.".format(name)) from e
12821319

1320+
@classmethod
def from_number(cls, number: int):
    """Return a cache type from its numeric id as used by the API.

    :param number: Numeric geocache type identifier.
    """
    # Enum values are stored as strings, so coerce the numeric id first.
    return cls(str(number))
1323+
12831324

12841325
class Size(enum.Enum):
12851326
"""Enum of possible cache sizes.
@classmethod
def from_number(cls, number):
    """Return a cache size from its numeric id as used by the API.

    :param number: Numeric container-type identifier (int or int-like string).
    :raises .ValueError: If the numeric id is not recognized.
    """
    number = int(number)

    # Numeric ids come from the Groundspeak API "containerType" field.
    number_mapping = {
        1: cls.not_chosen,
        2: cls.micro,
        3: cls.regular,
        4: cls.large,
        5: cls.virtual,
        6: cls.other,
        8: cls.small,
    }

    try:
        return number_mapping[number]
    except KeyError as e:
        raise errors.ValueError("Unknown cache size numeric id '{}'.".format(number)) from e
1379+
1380+
1381+
class Status(enum.IntEnum):
    """Enum of possible cache statuses.

    Integer values correspond to the ``cacheStatus`` field returned by the
    geocaching.com map API.
    """

    # NOTE: extracted from https://www.geocaching.com/play/map/public/main.2b28b0dc1c9c10aaba66.js
    enabled = 0
    disabled = 1
    archived = 2
    unpublished = 3

pycaching/errors.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,3 +55,22 @@ class ValueError(Error, ValueError):
5555
Can be raised in various situations, but most commonly when unexpected property value is set.
5656
"""
5757
pass
58+
59+
60+
class TooManyRequestsError(Error):
    """Geocaching API rate limit has been reached.

    Carries the requested URL and the number of seconds until the rate
    limit resets, so callers can decide to wait and retry.
    """

    # Extra seconds slept on top of the server-reported reset time, as a
    # safety margin against clock skew between client and server.
    _SAFETY_MARGIN = 5

    def __init__(self, url: str, rate_limit_reset: int = 0):
        """
        Initialize TooManyRequestsError.

        :param url: Requested url.
        :param rate_limit_reset: Number of seconds to wait before rate limit reset.
        """
        # Initialize the base exception so standard Exception machinery
        # (args, pickling) behaves consistently.
        super().__init__()
        self.url = url
        self.rate_limit_reset = rate_limit_reset

    def wait_for(self):
        """Wait enough time to release Rate Limits."""
        # Local import keeps the module import side-effect free; this path
        # is only taken when the rate limit has actually been hit.
        import time
        time.sleep(self.rate_limit_reset + self._SAFETY_MARGIN)

pycaching/geo.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -230,6 +230,10 @@ def __init__(self, point_a, point_b):
230230
:param .Point point_a: Top left corner.
231231
:param .Point point_b: Bottom right corner.
232232
"""
233+
if point_a.latitude < point_b.latitude:
234+
point_a.latitude, point_b.latitude = point_b.latitude, point_a.latitude
235+
if point_a.longitude > point_b.longitude:
236+
point_a.longitude, point_b.longitude = point_b.longitude, point_a.longitude
233237

234238
assert point_a != point_b, "Corner points cannot be the same"
235239
self.corners = [point_a, point_b]

pycaching/geocaching.py

Lines changed: 86 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,30 @@
77
import json
88
import subprocess
99
import warnings
10+
import enum
11+
from typing import Optional, Union
1012
from urllib.parse import parse_qs, urljoin, urlparse
1113
from os import path
1214
from pycaching.cache import Cache, Size
1315
from pycaching.log import Log, Type as LogType
14-
from pycaching.geo import Point
16+
from pycaching.geo import Point, Rectangle
1517
from pycaching.trackable import Trackable
16-
from pycaching.errors import Error, NotLoggedInException, LoginFailedException, PMOnlyException
18+
from pycaching.errors import Error, NotLoggedInException, LoginFailedException, PMOnlyException, TooManyRequestsError
19+
20+
21+
class SortOrder(enum.Enum):
    """Enum of possible cache sort orderings returned in Groundspeak API.

    Values are the literal ``sort`` query-parameter strings accepted by the
    geocaching.com search API.
    """

    # NOTE: extracted from https://www.geocaching.com/play/map/public/main.2b28b0dc1c9c10aaba66.js
    container_size = "containersize"
    date_last_visited = "datelastvisited"
    difficulty = "difficulty"
    distance = "distance"
    favorite_point = "favoritepoint"
    found_date = "founddate"
    found_date_of_found_by_user = "founddateoffoundbyuser"
    geocache_name = "geocachename"
    place_date = "placedate"
    terrain = "terrain"
1734

1835

1936
class Geocaching(object):
@@ -29,6 +46,7 @@ class Geocaching(object):
2946
"search": "play/search",
3047
"search_more": "play/search/more-results",
3148
'my_logs': 'my/logs.aspx',
49+
'api_search': 'api/proxy/web/search'
3250
}
3351
_credentials_file = ".gc_credentials"
3452

@@ -67,6 +85,12 @@ def _request(self, url, *, expect="soup", method="GET", login_check=True, **kwar
6785
return res
6886

6987
except requests.exceptions.RequestException as e:
88+
if e.response.status_code == 429: # Handle rate limiting errors
89+
raise TooManyRequestsError(
90+
url,
91+
rate_limit_reset=int(e.response.headers.get('x-rate-limit-reset', '0'))
92+
) from e
93+
7094
raise Error("Cannot load page: {}".format(url)) from e
7195

7296
def login(self, username=None, password=None):
@@ -356,6 +380,64 @@ def search_quick(self, area, *, strict=False, zoom=None):
356380

357381
# add some shortcuts ------------------------------------------------------
358382

383+
def search_rect(
    self,
    rect: Rectangle,
    *,
    per_query: int = 200,
    sort_by: Union[str, SortOrder] = SortOrder.date_last_visited,
    origin: Optional[Point] = None,
    wait_sleep: bool = True
):
    """
    Return a generator of caches in the given Rectangle area.

    :param rect: Search area.
    :param int per_query: Number of caches requested in a single query.
    :param sort_by: Order caches by the given criterion.
    :param origin: Origin point for search by distance (required when
        sorting by distance).
    :param wait_sleep: In case of rate limits exceeding, wait appropriate
        time if set True, otherwise just yield None.
    """
    # Accept either a SortOrder member or its raw string value.
    sort_by = SortOrder(sort_by)

    corner_a, corner_b = rect.corners
    params = {
        "box": "{},{},{},{}".format(
            corner_a.latitude,
            corner_a.longitude,
            corner_b.latitude,
            corner_b.longitude,
        ),
        "take": per_query,
        "asc": "true",
        "skip": 0,
        "sort": sort_by.value,
    }

    # Distance ordering is relative to an explicit origin point.
    if sort_by is SortOrder.distance:
        assert isinstance(origin, Point)
        params["origin"] = "{},{}".format(origin.latitude, origin.longitude)

    # Page through results until the server-reported total is exhausted;
    # total is unknown until the first response arrives.
    total, offset = None, 0
    while total is None or offset < total:
        params["skip"] = offset

        try:
            resp = self._request(self._urls["api_search"], params=params, expect="json")
        except TooManyRequestsError as e:
            # Retry the same page after either sleeping through the rate
            # limit or signalling the caller with a None sentinel.
            if wait_sleep:
                e.wait_for()
            else:
                yield None
            continue

        for record in resp["results"]:
            yield Cache._from_api_record(self, record)

        total = resp["total"]
        offset += per_query
359441
def geocode(self, location):
360442
"""Return a :class:`.Point` object from geocoded location.
361443
@@ -396,8 +478,8 @@ def post_log(self, wp, text, type=LogType.found_it, date=None):
396478
"""
397479
if not date:
398480
date = datetime.date.today()
399-
l = Log(type=type, text=text, visited=date)
400-
self.get_cache(wp).post_log(l)
481+
log = Log(type=type, text=text, visited=date)
482+
self.get_cache(wp).post_log(log)
401483

402484
def _cache_from_guid(self, guid):
403485
logging.info('Loading cache with GUID {!r}'.format(guid))

pycaching/util.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def format_date(date, user_date_format):
7777
"""Format a date according to user_date_format."""
7878
# parse user format
7979
date_format = user_date_format.lower()
80-
date_format = re.split("(\W+)", date_format)
80+
date_format = re.split(r"(\W+)", date_format)
8181
# non-zero-padded numbers use different characters depending on different platforms
8282
# see https://strftime.org/ for example
8383
eat_zero_prefix = "#" if platform.system() == "Windows" else "-"

0 commit comments

Comments
 (0)