27
27
from datetime import date , datetime , timedelta
28
28
from typing import Dict , Generator , List , Optional , Set , Tuple , Type , cast
29
29
30
+ from aea .protocols .base import Message
30
31
from twitter_text import parse_tweet
31
32
32
33
from packages .valory .connections .openai .connection import (
33
34
PUBLIC_ID as LLM_CONNECTION_PUBLIC_ID ,
34
35
)
36
+ from packages .valory .connections .tweepy .connection import (
37
+ PUBLIC_ID as TWEEPY_CONNECTION_PUBLIC_ID ,
38
+ )
35
39
from packages .valory .protocols .llm .message import LlmMessage
40
+ from packages .valory .protocols .srr .dialogues import SrrDialogue , SrrDialogues
41
+ from packages .valory .protocols .srr .message import SrrMessage
36
42
from packages .valory .skills .abstract_round_abci .base import AbstractRound
37
43
from packages .valory .skills .abstract_round_abci .behaviours import (
38
44
AbstractRoundBehaviour ,
57
63
from packages .valory .skills .olas_week_abci .prompts import tweet_summarizer_prompt
58
64
from packages .valory .skills .olas_week_abci .rounds import (
59
65
ERROR_API_LIMITS ,
60
- ERROR_GENERIC ,
66
+ ERROR_TWEEPY_CONNECTION ,
61
67
Event ,
62
68
OlasWeekDecisionMakingRound ,
63
69
OlasWeekEvaluationRound ,
83
89
LINK_REGEX = r"https?:\/\/(?:www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b(?:[-a-zA-Z0-9()@:%_\+.~#?&\/=]*)"
84
90
HIGHLIGHT_LINK_REGEX = rf"☴.*\n{ LINK_REGEX } \n"
85
91
WEEK_IN_OLAS_REGEX = r".*Week \d+ in Olas.*"
92
+ AUTONOLAS_TWITTER_ID = "1450081635559428107"
86
93
87
94
88
95
def extract_headers (header_str : str ) -> dict :
@@ -204,6 +211,38 @@ def _check_twitter_limits(self) -> Tuple:
204
211
# Window has not expired and we have not reached the max number of tweets
205
212
return False , number_of_tweets_pulled_today , last_tweet_pull_window_reset
206
213
214
+ def _do_connection_request (
215
+ self ,
216
+ message : Message ,
217
+ dialogue : Message ,
218
+ timeout : Optional [float ] = None ,
219
+ ) -> Generator [None , None , Message ]:
220
+ """Do a request and wait the response, asynchronously."""
221
+
222
+ self .context .outbox .put_message (message = message )
223
+ request_nonce = self ._get_request_nonce_from_dialogue (dialogue ) # type: ignore
224
+ cast (Requests , self .context .requests ).request_id_to_callback [
225
+ request_nonce
226
+ ] = self .get_callback_request ()
227
+ response = yield from self .wait_for_message (timeout = timeout )
228
+ return response
229
+
230
+ def _call_tweepy (
231
+ self ,
232
+ ** kwargs ,
233
+ ) -> Generator [None , None , Dict ]:
234
+ """Send a request message from the skill context."""
235
+ srr_dialogues = cast (SrrDialogues , self .context .srr_dialogues )
236
+ srr_message , srr_dialogue = srr_dialogues .create (
237
+ counterparty = str (TWEEPY_CONNECTION_PUBLIC_ID ),
238
+ performative = SrrMessage .Performative .REQUEST ,
239
+ payload = json .dumps (** kwargs ),
240
+ )
241
+ srr_message = cast (SrrMessage , srr_message )
242
+ srr_dialogue = cast (SrrDialogue , srr_dialogue )
243
+ response = yield from self ._do_connection_request (srr_message , srr_dialogue ) # type: ignore
244
+ return json .loads (response .payload ) # type: ignore
245
+
207
246
208
247
class OlasWeekRandomnessBehaviour (RandomnessBehaviour ):
209
248
"""Retrieve randomness."""
@@ -439,7 +478,7 @@ def _sender_act(self) -> Generator:
439
478
440
479
else :
441
480
# Get tweets from Twitter
442
- payload_data = yield from self ._get_tweets (
481
+ payload_data = yield from self ._get_week_tweets (
443
482
number_of_tweets_pulled_today = number_of_tweets_pulled_today
444
483
)
445
484
@@ -455,15 +494,13 @@ def _sender_act(self) -> Generator:
455
494
456
495
self .set_done ()
457
496
458
- def _get_tweets (
497
+ def _get_week_tweets (
459
498
self ,
460
499
number_of_tweets_pulled_today : int ,
461
500
) -> Generator [None , None , Dict ]:
462
- """Get Tweets"""
463
-
464
- api_base = self .params .twitter_api_base
465
- api_endpoint = self .params .twitter_tweets_endpoint
501
+ """Get last week's tweets from Twitter"""
466
502
503
+ # Check the tweet allowance
467
504
number_of_tweets_remaining_today = (
468
505
self .params .max_tweet_pulls_allowed - number_of_tweets_pulled_today
469
506
)
@@ -487,138 +524,42 @@ def _get_tweets(
487
524
488
525
start_time_str = start_time .strftime ("%Y-%m-%dT%H:%M:%SZ" )
489
526
490
- # Build the args
491
- api_args = self .params .twitter_tweets_args .replace (
492
- "{start_time}" , start_time_str
493
- )
494
- api_args = api_args .replace (
495
- "{max_results}" , str (number_of_tweets_remaining_today )
496
- )
497
- api_url = api_base + api_endpoint + api_args
498
- headers = dict (Authorization = f"Bearer { self .params .twitter_api_bearer_token } " )
527
+ self .context .logger .info (f"Searching @autonolas tweets since { start_time_str } " )
499
528
500
- self .context .logger .info (
501
- f"Retrieving tweets from Twitter API [{ api_url } ]\n Bearer token { self .params .twitter_api_bearer_token [:5 ]} *******{ self .params .twitter_api_bearer_token [- 5 :]} "
529
+ # Call Tweepy connection
530
+ response = yield from self ._call_tweepy (
531
+ action = "get_users_tweets" ,
532
+ kwargs = {
533
+ "id" : AUTONOLAS_TWITTER_ID ,
534
+ "start_time" : start_time_str ,
535
+ },
502
536
)
537
+ # Check response
538
+ if "error" in response :
539
+ return {
540
+ "tweets" : None ,
541
+ "error" : ERROR_TWEEPY_CONNECTION ,
542
+ "latest_mention_tweet_id" : None ,
543
+ "number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
544
+ "sleep_until" : self .synchronized_data .sleep_until ,
545
+ }
503
546
504
- tweets = {}
505
- next_token = None
506
-
507
- # Pagination loop: we read a max of <twitter_max_pages> pages each period
508
- # Each page contains 100 tweets. The default value for twitter_max_pages is 10
509
- for _ in range (self .params .twitter_max_pages ):
510
- self .context .logger .info (
511
- f"Retrieving a new page. max_pages={ self .params .twitter_max_pages } "
512
- )
513
- url = api_url
514
- # Add the pagination token if it exists
515
- if next_token :
516
- url += f"&pagination_token={ next_token } "
517
-
518
- # Make the request
519
- response = yield from self .get_http_response (
520
- method = "GET" , url = url , headers = headers
521
- )
522
-
523
- # Check response status
524
- if response .status_code != 200 :
525
- header_dict = extract_headers (response .headers )
526
-
527
- remaining , limit , reset_ts = [
528
- header_dict .get (header , "?" )
529
- for header in [
530
- "x-rate-limit-remaining" ,
531
- "x-rate-limit-limit" ,
532
- "x-rate-limit-reset" ,
533
- ]
534
- ]
535
- reset = (
536
- datetime .fromtimestamp (int (reset_ts )).strftime ("%Y-%m-%d %H:%M:%S" )
537
- if reset_ts != "?"
538
- else None
539
- )
540
-
541
- self .context .logger .error (
542
- f"Error retrieving tweets from Twitter [{ response .status_code } ]: { response .body } "
543
- f"API limits: { remaining } /{ limit } . Window reset: { reset } "
544
- )
545
-
546
- return {
547
- "tweets" : None ,
548
- "error" : ERROR_API_LIMITS
549
- if response .status_code == HTTP_TOO_MANY_REQUESTS
550
- else ERROR_GENERIC ,
551
- "number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
552
- "sleep_until" : reset_ts
553
- if response .status_code == HTTP_TOO_MANY_REQUESTS
554
- else self .synchronized_data .sleep_until ,
555
- }
556
-
557
- api_data = json .loads (response .body )
558
-
559
- # Check the meta field
560
- if "meta" not in api_data :
561
- self .context .logger .error (
562
- f"Twitter API response does not contain the required 'meta' field: { api_data !r} "
563
- )
564
- return {
565
- "tweets" : None ,
566
- "error" : ERROR_GENERIC ,
567
- "number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
568
- "sleep_until" : None , # we reset this on a successful request
569
- }
570
-
571
- # Check if there are no more results
572
- if (
573
- "result_count" in api_data ["meta" ]
574
- and int (api_data ["meta" ]["result_count" ]) == 0
575
- ):
576
- break
577
-
578
- # Check that the data exists
579
- if "data" not in api_data or "newest_id" not in api_data ["meta" ]:
580
- self .context .logger .error (
581
- f"Twitter API response does not contain the required 'meta' field: { api_data !r} "
582
- )
583
- return {
584
- "tweets" : None ,
585
- "error" : ERROR_GENERIC ,
586
- "number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
587
- "sleep_until" : None , # we reset this on a successful request
588
- }
589
-
590
- if "includes" not in api_data or "users" not in api_data ["includes" ]:
591
- self .context .logger .error (
592
- f"Twitter API response does not contain the required 'includes/users' field: { api_data !r} "
593
- )
594
- return {
595
- "tweets" : None ,
596
- "error" : ERROR_GENERIC ,
597
- "number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
598
- "sleep_until" : None , # we reset this on a successful request
599
- }
600
-
601
- # Add the retrieved tweets
602
- for tweet in api_data ["data" ]:
603
- tweets [tweet ["id" ]] = tweet
604
-
605
- # Set the author handle
606
- for user in api_data ["includes" ]["users" ]:
607
- if user ["id" ] == tweet ["author_id" ]:
608
- tweets [tweet ["id" ]]["username" ] = user ["username" ]
609
- break
610
- number_of_tweets_pulled_today += 1
611
-
612
- if "next_token" in api_data ["meta" ]:
613
- next_token = api_data ["meta" ]["next_token" ]
614
- continue
547
+ # Process tweets
548
+ tweets = {t ["id" ]: t for t in response ["tweets" ]}
549
+ retrieved_tweets = len (response ["tweets" ])
550
+ number_of_tweets_pulled_today += retrieved_tweets
551
+ latest_tweet_id = response ["tweets" ][
552
+ 0
553
+ ].id # tweepy sorts by most recent first by default
615
554
616
- break
555
+ self .context .logger .info (
556
+ f"Got { retrieved_tweets } new hashtag tweets until tweet_id={ latest_tweet_id } : { tweets .keys ()} "
557
+ )
617
558
618
559
self .context .logger .info (f"Got { len (tweets )} new tweets" )
619
560
620
561
return {
621
- "tweets" : list ( tweets . values ()) ,
562
+ "tweets" : tweets ,
622
563
"number_of_tweets_pulled_today" : number_of_tweets_pulled_today ,
623
564
"sleep_until" : None , # we reset this on a successful request
624
565
}
@@ -720,7 +661,7 @@ def evaluate_summary(
720
661
)
721
662
request_llm_message = cast (LlmMessage , request_llm_message )
722
663
llm_dialogue = cast (LlmDialogue , llm_dialogue )
723
- llm_response_message = yield from self ._do_request (
664
+ llm_response_message = yield from self ._do_connection_request (
724
665
request_llm_message , llm_dialogue
725
666
)
726
667
data = llm_response_message .value
@@ -730,30 +671,6 @@ def evaluate_summary(
730
671
self .context .logger .info (f"Parsed summary: { summary } " )
731
672
return summary
732
673
733
- def _do_request (
734
- self ,
735
- llm_message : LlmMessage ,
736
- llm_dialogue : LlmDialogue ,
737
- timeout : Optional [float ] = None ,
738
- ) -> Generator [None , None , LlmMessage ]:
739
- """
740
- Do a request and wait the response, asynchronously.
741
-
742
- :param llm_message: The request message
743
- :param llm_dialogue: the HTTP dialogue associated to the request
744
- :param timeout: seconds to wait for the reply.
745
- :yield: LLMMessage object
746
- :return: the response message
747
- """
748
- self .context .outbox .put_message (message = llm_message )
749
- request_nonce = self ._get_request_nonce_from_dialogue (llm_dialogue )
750
- cast (Requests , self .context .requests ).request_id_to_callback [
751
- request_nonce
752
- ] = self .get_callback_request ()
753
- # notify caller by propagating potential timeout exception.
754
- response = yield from self .wait_for_message (timeout = timeout )
755
- return response
756
-
757
674
758
675
class OlasWeekRoundBehaviour (AbstractRoundBehaviour ):
759
676
"""OlasWeekRoundBehaviour"""
0 commit comments