From 3e69154f409d89b9ef76f2eced4406c70ce208a6 Mon Sep 17 00:00:00 2001 From: David Vilela Date: Wed, 12 Jun 2024 17:09:32 +0200 Subject: [PATCH 1/4] feat: replace twitter calls on scoring behaviours --- .../agents/impact_evaluator/aea-config.yaml | 11 +- .../valory/connections/tweepy/connection.py | 15 +- .../services/impact_evaluator/service.yaml | 35 +- .../impact_evaluator_local/service.yaml | 11 +- .../fsm_specification.yaml | 27 +- .../skills/impact_evaluator_abci/skill.yaml | 11 +- .../skills/twitter_scoring_abci/behaviours.py | 508 +++--------------- .../fsm_specification.yaml | 27 +- .../skills/twitter_scoring_abci/models.py | 2 + .../skills/twitter_scoring_abci/payloads.py | 11 +- .../skills/twitter_scoring_abci/rounds.py | 179 +----- .../skills/twitter_scoring_abci/skill.yaml | 9 +- 12 files changed, 145 insertions(+), 701 deletions(-) diff --git a/packages/valory/agents/impact_evaluator/aea-config.yaml b/packages/valory/agents/impact_evaluator/aea-config.yaml index ce8ccd71..f904fec3 100644 --- a/packages/valory/agents/impact_evaluator/aea-config.yaml +++ b/packages/valory/agents/impact_evaluator/aea-config.yaml @@ -225,16 +225,7 @@ models: whitelist_api_key: null whitelist_endpoint: http://localhost points_to_image_hashes: ${str:{}} - twitter_api_base: ${str:https://api.twitter.com/} - twitter_api_bearer_token: ${str:} - twitter_mentions_endpoint: ${str:2/users/1450081635559428107/mentions?} - twitter_mentions_args: ${str:tweet.fields=author_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id}} - twitter_max_pages: ${int:1} max_tweet_pulls_allowed: ${int:80} - twitter_search_endpoint: ${str:2/tweets/search/recent?} - twitter_search_args: ${str:query=%23olas&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id}} - twitter_tweets_endpoint: ${str:2/users/1450081635559428107/tweets?} - twitter_tweets_args: ${str:tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time}} openai_call_window_size: ${float:3600.0} openai_calls_allowed_in_window: ${int:100} tx_timeout: 10.0 @@ -260,6 +251,8 @@ models: termination_from_block: ${int:0} mech_chain_id: ${str:gnosis} mech_interaction_sleep_time: ${int:10} + recent_tweet_query: ${str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply} + twitter_max_recent_results: ${int:10} randomness_api: args: api_id: cloudflare diff --git a/packages/valory/connections/tweepy/connection.py b/packages/valory/connections/tweepy/connection.py index b2523c90..cf2b8d8d 100644 --- a/packages/valory/connections/tweepy/connection.py +++ b/packages/valory/connections/tweepy/connection.py @@ -323,7 +323,7 @@ def read_with_key_rotation(self, method, **kwargs) -> Tuple[Dict, bool]: cli = self.get_read_cli() method = getattr(cli, method) result = method(**kwargs) - return result, False + return {"tweets": self.process_tweets(result)}, False except Exception as e: self.logger.error( f"Error when calling {method} on account {self.twitter_read_credentials[0]['account_id']}. 
Rotating credentials:\n{e}"
             )
             self.rotate_read_credentials()
             rotations += 1
         return {"error": "Max Twitter read credential rotations reached"}, True
 
+    def process_tweets(self, tweets) -> List:
+        """Process tweets into plain, JSON-serializable dicts"""
+        users = {u["id"]: u for u in tweets.includes["users"]}
+        return [
+            {
+                "id": tweet.id,
+                "text": tweet.text,
+                "author_id": tweet.author_id,
+                # keep only the username string so the payload stays JSON-serializable
+                "username": users[tweet.author_id].username,
+            }
+            for tweet in tweets
+        ]
+
     def search_recent_tweets(self, **kwargs) -> Tuple[Dict, bool]:
         """Search recent tweets"""
         return self.read_with_key_rotation(
diff --git a/packages/valory/services/impact_evaluator/service.yaml b/packages/valory/services/impact_evaluator/service.yaml
index 309e41d1..bf0eb5b1 100644
--- a/packages/valory/services/impact_evaluator/service.yaml
+++ b/packages/valory/services/impact_evaluator/service.yaml
@@ -132,6 +132,8 @@ extra:
       termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
       mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
       mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
+      recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+      twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
       randomness_api:
         args:
           url: ${DRAND_ENDPOINT:str:https://drand.cloudflare.com/public/latest}
@@ -185,16 +187,7 @@ extra:
       whitelist_api_key: null
       whitelist_endpoint: http://localhost
       points_to_image_hashes: ${POINTS_TO_IMAGE_HASHES:str:{"0":"bafybeiabtdl53v2a3irrgrg7eujzffjallpymli763wvhv6gceurfmcemm","100":"bafybeid46w6yzbehir7ackcnsyuasdkun5aq7jnckt4sknvmiewpph776q","50000":"bafybeigbxlwzljbxnlwteupmt6c6k7k2m4bbhunvxxa53dc7niuedilnr4","100000":"bafybeiawxpq4mqckbau3mjwzd3ic2o7ywlhp6zqo7jnaft26zeqm3xsjjy","150000":"bafybeie6k53dupf7rf6622rzfxu3dmlv36hytqrmzs5yrilxwcrlhrml2m"}}
-      twitter_api_base: ${TWITTER_API_BASE:str:https://api.twitter.com/}
-      twitter_api_bearer_token: ${TWITTER_API_BEARER_TOKEN:str:null}
-      twitter_mentions_endpoint: ${TWITTER_MENTIONS_ENDPOINT:str:2/users/1450081635559428107/mentions?}
-      twitter_mentions_args: ${TWITTER_MENTIONS_ARGS:str:tweet.fields=author_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}}
-      twitter_max_pages: ${TWITTER_MAX_PAGES:int:1}
       max_tweet_pulls_allowed: ${MAX_TWEET_PULLS_ALLOWED:int:80}
-      twitter_search_endpoint: ${TWITTER_SEARCH_ENDPOINT:str:2/tweets/search/recent?}
-      twitter_search_args: ${TWITTER_SEARCH_ARGS:str:query=%23OlasNetwork&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}}
-      twitter_tweets_endpoint: ${TWITTER_TWEETS_ENDPOINT:str:2/users/1450081635559428107/tweets?}
-      twitter_tweets_args: ${TWITTER_TWEETS_ARGS:str:tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time}}
       tx_timeout: 10.0
       use_termination: ${USE_TERMINATION:bool:true}
       validate_timeout: 1205
@@ -219,6 +212,8 @@ extra:
       termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
       mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
       mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
+      recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+      twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
       randomness_api:
         args:
           url: ${DRAND_ENDPOINT:str:https://drand.cloudflare.com/public/latest}
@@ -272,16 +267,7 @@ extra:
       whitelist_api_key: null
      whitelist_endpoint: http://localhost 
points_to_image_hashes: ${POINTS_TO_IMAGE_HASHES:str:{"0":"bafybeiabtdl53v2a3irrgrg7eujzffjallpymli763wvhv6gceurfmcemm","100":"bafybeid46w6yzbehir7ackcnsyuasdkun5aq7jnckt4sknvmiewpph776q","50000":"bafybeigbxlwzljbxnlwteupmt6c6k7k2m4bbhunvxxa53dc7niuedilnr4","100000":"bafybeiawxpq4mqckbau3mjwzd3ic2o7ywlhp6zqo7jnaft26zeqm3xsjjy","150000":"bafybeie6k53dupf7rf6622rzfxu3dmlv36hytqrmzs5yrilxwcrlhrml2m"}} - twitter_api_base: ${TWITTER_API_BASE:str:https://api.twitter.com/} - twitter_api_bearer_token: ${TWITTER_API_BEARER_TOKEN:str:null} - twitter_mentions_endpoint: ${TWITTER_MENTIONS_ENDPOINT:str:2/users/1450081635559428107/mentions?} - twitter_mentions_args: ${TWITTER_MENTIONS_ARGS:str:tweet.fields=author_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_max_pages: ${TWITTER_MAX_PAGES:int:1} max_tweet_pulls_allowed: ${MAX_TWEET_PULLS_ALLOWED:int:80} - twitter_search_endpoint: ${TWITTER_SEARCH_ENDPOINT:str:2/tweets/search/recent?} - twitter_search_args: ${TWITTER_SEARCH_ARGS:str:query=%23OlasNetwork&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_tweets_endpoint: ${TWITTER_TWEETS_ENDPOINT:str:2/users/1450081635559428107/tweets?} - twitter_tweets_args: ${TWITTER_TWEETS_ARGS:str:tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time}} tx_timeout: 10.0 use_termination: ${USE_TERMINATION:bool:true} validate_timeout: 1205 @@ -306,6 +292,8 @@ extra: termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045} mech_chain_id: ${MECH_CHAIN_ID:str:gnosis} mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10} + recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply} + twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10} randomness_api: args: url: ${DRAND_ENDPOINT:str:https://drand.cloudflare.com/public/latest} @@ -361,16 +349,7 @@ extra: whitelist_api_key: null whitelist_endpoint: http://localhost points_to_image_hashes: ${POINTS_TO_IMAGE_HASHES:str:{"0":"bafybeiabtdl53v2a3irrgrg7eujzffjallpymli763wvhv6gceurfmcemm","100":"bafybeid46w6yzbehir7ackcnsyuasdkun5aq7jnckt4sknvmiewpph776q","50000":"bafybeigbxlwzljbxnlwteupmt6c6k7k2m4bbhunvxxa53dc7niuedilnr4","100000":"bafybeiawxpq4mqckbau3mjwzd3ic2o7ywlhp6zqo7jnaft26zeqm3xsjjy","150000":"bafybeie6k53dupf7rf6622rzfxu3dmlv36hytqrmzs5yrilxwcrlhrml2m"}} - twitter_api_base: ${TWITTER_API_BASE:str:https://api.twitter.com/} - twitter_api_bearer_token: ${TWITTER_API_BEARER_TOKEN:str:null} - twitter_mentions_endpoint: ${TWITTER_MENTIONS_ENDPOINT:str:2/users/1450081635559428107/mentions?} - twitter_mentions_args: ${TWITTER_MENTIONS_ARGS:str:tweet.fields=author_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_max_pages: ${TWITTER_MAX_PAGES:int:1} max_tweet_pulls_allowed: ${MAX_TWEET_PULLS_ALLOWED:int:80} - twitter_search_endpoint: ${TWITTER_SEARCH_ENDPOINT:str:2/tweets/search/recent?} - twitter_search_args: ${TWITTER_SEARCH_ARGS:str:query=%23OlasNetwork&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_tweets_endpoint: ${TWITTER_TWEETS_ENDPOINT:str:2/users/1450081635559428107/tweets?} - twitter_tweets_args: ${TWITTER_TWEETS_ARGS:str:tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time}} tx_timeout: 10.0 
use_termination: ${USE_TERMINATION:bool:true} validate_timeout: 1205 @@ -397,6 +376,8 @@ extra: termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045} mech_chain_id: ${MECH_CHAIN_ID:str:gnosis} mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10} + recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply} + twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10} randomness_api: args: url: ${DRAND_ENDPOINT:str:https://drand.cloudflare.com/public/latest} diff --git a/packages/valory/services/impact_evaluator_local/service.yaml b/packages/valory/services/impact_evaluator_local/service.yaml index fb9bd071..df7d3174 100644 --- a/packages/valory/services/impact_evaluator_local/service.yaml +++ b/packages/valory/services/impact_evaluator_local/service.yaml @@ -97,16 +97,7 @@ models: whitelist_api_key: null whitelist_endpoint: http://localhost points_to_image_hashes: ${POINTS_TO_IMAGE_HASHES:str:null} - twitter_api_base: ${TWITTER_API_BASE:str:https://api.twitter.com/} - twitter_api_bearer_token: ${TWITTER_API_BEARER_TOKEN:str:null} - twitter_mentions_endpoint: ${TWITTER_MENTIONS_ENDPOINT:str:2/users/1450081635559428107/mentions?} - twitter_mentions_args: ${TWITTER_MENTIONS_ARGS:str:tweet.fields=author_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_max_pages: ${TWITTER_MAX_PAGES:int:1} max_tweet_pulls_allowed: ${MAX_TWEET_PULLS_ALLOWED:int:80} - twitter_search_endpoint: ${TWITTER_SEARCH_ENDPOINT:str:2/tweets/search/recent?} - twitter_search_args: ${TWITTER_SEARCH_ARGS:str:query=%23OlasNetwork&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=25&since_id={since_id}} - twitter_tweets_endpoint: ${TWITTER_TWEETS_ENDPOINT:str:2/users/1450081635559428107/tweets?} - twitter_tweets_args: ${TWITTER_TWEETS_ARGS:str:tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time}} tx_timeout: 10.0 use_termination: ${USE_TERMINATION:bool:true} validate_timeout: 1205 @@ -126,6 +117,8 @@ models: termination_from_block: ${TERMINATION_FROM_BLOCK:int:0} mech_chain_id: ${MECH_CHAIN_ID:str:gnosis} mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10} + recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply} + twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10} randomness_api: args: url: ${DRAND_ENDPOINT:str:https://drand.cloudflare.com/public/latest} diff --git a/packages/valory/skills/impact_evaluator_abci/fsm_specification.yaml b/packages/valory/skills/impact_evaluator_abci/fsm_specification.yaml index 1cfabc5d..ad07bee2 100644 --- a/packages/valory/skills/impact_evaluator_abci/fsm_specification.yaml +++ b/packages/valory/skills/impact_evaluator_abci/fsm_specification.yaml @@ -36,8 +36,6 @@ alphabet_in: - READ_MANUAL_POINTS - RESET_AND_PAUSE_TIMEOUT - RESET_TIMEOUT -- RETRIEVE_HASHTAGS -- RETRIEVE_MENTIONS - RETRIEVE_TWEETS - RETRY - ROUND_TIMEOUT @@ -47,6 +45,7 @@ alphabet_in: - SKIP_EVALUATION - SKIP_REQUEST - SUSPICIOUS_ACTIVITY +- TWEET_COLLECTION - TWEET_EVALUATION_ROUND_TIMEOUT - TWEET_VALIDATION - UPDATE_CENTAURS @@ -97,9 +96,8 @@ states: - StreamWriteRound - SynchronizeLateMessagesRound - TokenTrackRound +- TweetCollectionRound - TwitterDecisionMakingRound -- TwitterHashtagsCollectionRound -- TwitterMentionsCollectionRound - TwitterRandomnessRound - TwitterSelectKeepersRound - 
TwitterWriteRound @@ -256,27 +254,20 @@ transition_func: (TokenTrackRound, DONE): DecisionMakingRound (TokenTrackRound, NO_MAJORITY): TokenTrackRound (TokenTrackRound, ROUND_TIMEOUT): TokenTrackRound + (TweetCollectionRound, API_ERROR): TweetCollectionRound + (TweetCollectionRound, DONE): TwitterDecisionMakingRound + (TweetCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound + (TweetCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound + (TweetCollectionRound, NO_MAJORITY): TwitterRandomnessRound + (TweetCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound (TwitterDecisionMakingRound, DB_UPDATE): DBUpdateRound (TwitterDecisionMakingRound, DONE): TokenTrackRound (TwitterDecisionMakingRound, NO_MAJORITY): TwitterDecisionMakingRound (TwitterDecisionMakingRound, POST_MECH): PostMechRequestRound (TwitterDecisionMakingRound, PRE_MECH): PreMechRequestRound - (TwitterDecisionMakingRound, RETRIEVE_HASHTAGS): TwitterHashtagsCollectionRound - (TwitterDecisionMakingRound, RETRIEVE_MENTIONS): TwitterMentionsCollectionRound (TwitterDecisionMakingRound, ROUND_TIMEOUT): TwitterDecisionMakingRound (TwitterDecisionMakingRound, SELECT_KEEPERS): TwitterRandomnessRound - (TwitterHashtagsCollectionRound, API_ERROR): TwitterHashtagsCollectionRound - (TwitterHashtagsCollectionRound, DONE): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, NO_MAJORITY): TwitterRandomnessRound - (TwitterHashtagsCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound - (TwitterMentionsCollectionRound, API_ERROR): TwitterMentionsCollectionRound - (TwitterMentionsCollectionRound, DONE): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, NO_MAJORITY): TwitterRandomnessRound - (TwitterMentionsCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound + (TwitterDecisionMakingRound, TWEET_COLLECTION): TweetCollectionRound (TwitterRandomnessRound, DONE): TwitterSelectKeepersRound (TwitterRandomnessRound, NO_MAJORITY): TwitterRandomnessRound (TwitterRandomnessRound, ROUND_TIMEOUT): TwitterRandomnessRound diff --git a/packages/valory/skills/impact_evaluator_abci/skill.yaml b/packages/valory/skills/impact_evaluator_abci/skill.yaml index 42b31893..fa14cb00 100644 --- a/packages/valory/skills/impact_evaluator_abci/skill.yaml +++ b/packages/valory/skills/impact_evaluator_abci/skill.yaml @@ -163,16 +163,7 @@ models: tendermint_p2p_url: localhost:26656 tendermint_url: http://localhost:26657 token_uri_base: https://pfp.staging.autonolas.tech/ - twitter_api_base: https://api.twitter.com/ - twitter_api_bearer_token: - twitter_mentions_endpoint: 2/users/1450081635559428107/mentions? - twitter_mentions_args: tweet.fields=author_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id} - twitter_max_pages: 1 max_tweet_pulls_allowed: 80 - twitter_search_endpoint: 2/tweets/search/recent? - twitter_search_args: query=%23olas&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id} - twitter_tweets_endpoint: 2/users/1450081635559428107/tweets? 
- twitter_tweets_args: tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results=50&start_time={start_time} openai_call_window_size: 3600.0 openai_calls_allowed_in_window: 100 max_points_per_period: 900 @@ -199,6 +190,8 @@ models: tweet_consensus_veolas: 2000000 mech_chain_id: gnosis mech_interaction_sleep_time: 10 + recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply' + twitter_max_recent_results: 10 class_name: Params randomness_api: args: diff --git a/packages/valory/skills/twitter_scoring_abci/behaviours.py b/packages/valory/skills/twitter_scoring_abci/behaviours.py index e27ba78f..07614dc8 100644 --- a/packages/valory/skills/twitter_scoring_abci/behaviours.py +++ b/packages/valory/skills/twitter_scoring_abci/behaviours.py @@ -28,14 +28,21 @@ from datetime import datetime from typing import Dict, Generator, List, Optional, Set, Tuple, Type, cast +from aea.protocols.base import Message from web3 import Web3 +from packages.valory.connections.tweepy.connection import ( + PUBLIC_ID as TWEEPY_CONNECTION_PUBLIC_ID, +) +from packages.valory.protocols.srr.dialogues import SrrDialogue, SrrDialogues +from packages.valory.protocols.srr.message import SrrMessage from packages.valory.skills.abstract_round_abci.base import AbstractRound from packages.valory.skills.abstract_round_abci.behaviours import ( AbstractRoundBehaviour, BaseBehaviour, ) from packages.valory.skills.abstract_round_abci.common import RandomnessBehaviour +from packages.valory.skills.abstract_round_abci.models import Requests from packages.valory.skills.twitter_scoring_abci.models import ( OpenAICalls, Params, @@ -45,9 +52,8 @@ DBUpdatePayload, PostMechRequestPayload, PreMechRequestPayload, + TweetCollectionPayload, TwitterDecisionMakingPayload, - TwitterHashtagsCollectionPayload, - TwitterMentionsCollectionPayload, TwitterRandomnessPayload, TwitterSelectKeepersPayload, ) @@ -55,15 +61,14 @@ from packages.valory.skills.twitter_scoring_abci.rounds import ( DBUpdateRound, ERROR_API_LIMITS, - ERROR_GENERIC, + ERROR_TWEEPY_CONNECTION, Event, MechMetadata, PostMechRequestRound, PreMechRequestRound, SynchronizedData, + TweetCollectionRound, TwitterDecisionMakingRound, - TwitterHashtagsCollectionRound, - TwitterMentionsCollectionRound, TwitterRandomnessRound, TwitterScoringAbciApp, TwitterSelectKeepersRound, @@ -76,9 +81,6 @@ DEFAULT_TWEET_POINTS = 100 TWEET_QUALITY_TO_POINTS = {"LOW": 1, "AVERAGE": 2, "HIGH": 3} TWEET_RELATIONSHIP_TO_POINTS = {"LOW": 100, "AVERAGE": 200, "HIGH": 300} -HTTP_OK = 200 -HTTP_TOO_MANY_REQUESTS = 429 -RETWEET_START = "RT @" def extract_headers(header_str: str) -> dict: @@ -156,6 +158,38 @@ def _check_twitter_limits(self) -> Tuple: # Window has not expired and we have not reached the max number of tweets return False, number_of_tweets_pulled_today, last_tweet_pull_window_reset + def _do_connection_request( + self, + message: Message, + dialogue: Message, + timeout: Optional[float] = None, + ) -> Generator[None, None, Message]: + """Do a request and wait the response, asynchronously.""" + + self.context.outbox.put_message(message=message) + request_nonce = self._get_request_nonce_from_dialogue(dialogue) # type: ignore + cast(Requests, self.context.requests).request_id_to_callback[ + request_nonce + ] = self.get_callback_request() + response = yield from self.wait_for_message(timeout=timeout) + return response + + def _call_tweepy( + self, + **kwargs, + ) -> Generator[None, None, Dict]: + """Send a request message from the skill 
context."""
+        srr_dialogues = cast(SrrDialogues, self.context.srr_dialogues)
+        srr_message, srr_dialogue = srr_dialogues.create(
+            counterparty=str(TWEEPY_CONNECTION_PUBLIC_ID),
+            performative=SrrMessage.Performative.REQUEST,
+            payload=json.dumps(kwargs),  # serialize the kwargs dict itself; json.dumps(**kwargs) would fail
+        )
+        srr_message = cast(SrrMessage, srr_message)
+        srr_dialogue = cast(SrrDialogue, srr_dialogue)
+        response = yield from self._do_connection_request(srr_message, srr_dialogue)  # type: ignore
+        return json.loads(response.payload)  # type: ignore
+
 
 class TwitterRandomnessBehaviour(RandomnessBehaviour):
     """Retrieve randomness."""
@@ -285,11 +319,8 @@ def get_next_event(self) -> str:
         if Event.SELECT_KEEPERS.value not in performed_tasks:
             return Event.SELECT_KEEPERS.value
 
-        if Event.RETRIEVE_HASHTAGS.value not in performed_tasks:
-            return Event.RETRIEVE_HASHTAGS.value
-
-        if Event.RETRIEVE_MENTIONS.value not in performed_tasks:
-            return Event.RETRIEVE_MENTIONS.value
+        if Event.TWEET_COLLECTION.value not in performed_tasks:
+            return Event.TWEET_COLLECTION.value
 
         if Event.PRE_MECH.value not in performed_tasks:
             return Event.PRE_MECH.value
@@ -303,17 +334,10 @@ def get_next_event(self) -> str:
         return Event.DONE.value
 
 
-class TwitterMentionsCollectionBehaviour(TwitterScoringBaseBehaviour):
-    """TwitterMentionsCollectionBehaviour"""
+class TweetCollectionBehaviour(TwitterScoringBaseBehaviour):
+    """TweetCollectionBehaviour"""
 
-    matching_round: Type[AbstractRound] = TwitterMentionsCollectionRound
-
-    def _i_am_not_sending(self) -> bool:
-        """Indicates if the current agent is one of the sender or not."""
-        return (
-            self.context.agent_address
-            not in self.synchronized_data.most_voted_keeper_addresses
-        )
+    matching_round: Type[AbstractRound] = TweetCollectionRound
 
     def async_act(self) -> Generator[None, None, None]:
         """
@@ -329,246 +353,6 @@ def async_act(self) -> Generator[None, None, None]:
         else:
             yield from self._sender_act()
 
-    def _not_sender_act(self) -> Generator:
-        """Do the non-sender action."""
-        with self.context.benchmark_tool.measure(self.behaviour_id).consensus():
-            self.context.logger.info(
-                f"Waiting for the keeper to do its keeping: keepers={self.synchronized_data.most_voted_keeper_addresses}, me={self.context.agent_address}"
-            )
-            yield from self.wait_until_round_end()
-        self.set_done()
-
-    def _sender_act(self) -> Generator:
-        """Do the act, supporting asynchronous execution."""
-
-        with self.context.benchmark_tool.measure(self.behaviour_id).local():
-            self.context.logger.info("I am a keeper")
-
-            (
-                has_limit_reached,
-                number_of_tweets_pulled_today,
-                last_tweet_pull_window_reset,
-            ) = self._check_twitter_limits()
-
-            if has_limit_reached:
-                self.context.logger.info(
-                    "Cannot retrieve tweets, max number of tweets reached for today or 15-min request amount reached"
-                )
-                payload_data = {
-                    "tweets": None,
-                    "error": ERROR_API_LIMITS,
-                    "latest_mention_tweet_id": None,
-                    "number_of_tweets_pulled_today": number_of_tweets_pulled_today,
-                    "sleep_until": self.synchronized_data.sleep_until,
-                }
-
-            else:
-                # Get mentions from Twitter
-                payload_data = yield from self._get_twitter_mentions(
-                    number_of_tweets_pulled_today=number_of_tweets_pulled_today
-                )
-
-            payload_data["last_tweet_pull_window_reset"] = last_tweet_pull_window_reset
-            sender = self.context.agent_address
-            payload = TwitterMentionsCollectionPayload(
-                sender=sender, content=json.dumps(payload_data, sort_keys=True)
-            )
-
-        with self.context.benchmark_tool.measure(self.behaviour_id).consensus():
-            yield from self.send_a2a_transaction(payload)
-            yield from 
self.wait_until_round_end() - - self.set_done() - - def _get_twitter_mentions( - self, - number_of_tweets_pulled_today: int, - ) -> Generator[None, None, Dict]: - """Get Twitter mentions""" - - api_base = self.params.twitter_api_base - api_endpoint = self.params.twitter_mentions_endpoint - try: - latest_mention_tweet_id = int( - self.context.ceramic_db["module_data"]["twitter"][ - "latest_mention_tweet_id" - ] - ) - except KeyError: - latest_mention_tweet_id = 0 - - number_of_tweets_remaining_today = ( - self.params.max_tweet_pulls_allowed - number_of_tweets_pulled_today - ) - if number_of_tweets_remaining_today <= 0: - self.context.logger.info( - "Cannot retrieve twitter mentions, max number of tweets reached for today" - ) - return { - "tweets": None, - "error": ERROR_API_LIMITS, - "latest_mention_tweet_id": None, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": self.synchronized_data.sleep_until, - } - - next_tweet_id = ( - int(latest_mention_tweet_id) + 1 if int(latest_mention_tweet_id) != 0 else 0 - ) - api_args = self.params.twitter_mentions_args.replace( - "{since_id}", str(next_tweet_id) - ) - api_args = api_args.replace( - "{max_results}", str(number_of_tweets_remaining_today) - ) - api_url = api_base + api_endpoint + api_args - headers = dict(Authorization=f"Bearer {self.params.twitter_api_bearer_token}") - - self.context.logger.info( - f"Retrieving mentions from Twitter API [{api_url}]\nBearer token {self.params.twitter_api_bearer_token[:5]}*******{self.params.twitter_api_bearer_token[-5:]}" - ) - - tweets = {} - next_token = None - latest_tweet_id = None - - # Pagination loop: we read a max of pages each period - # Each page contains 100 tweets. The default value for twitter_max_pages is 10 - for _ in range(self.params.twitter_max_pages): - self.context.logger.info( - f"Retrieving a new page. max_pages={self.params.twitter_max_pages}" - ) - url = api_url - # Add the pagination token if it exists - if next_token: - url += f"&pagination_token={next_token}" - - # Make the request - response = yield from self.get_http_response( - method="GET", url=url, headers=headers - ) - - # Check response status - if response.status_code != 200: - header_dict = extract_headers(response.headers) - - remaining, limit, reset_ts = [ - header_dict.get(header, "?") - for header in [ - "x-rate-limit-remaining", - "x-rate-limit-limit", - "x-rate-limit-reset", - ] - ] - reset = ( - datetime.fromtimestamp(int(reset_ts)).strftime("%Y-%m-%d %H:%M:%S") - if reset_ts != "?" - else None - ) - - self.context.logger.error( - f"Error retrieving mentions from Twitter [{response.status_code}]: {response.body}" - f"API limits: {remaining}/{limit}. 
Window reset: {reset}" - ) - - return { - "tweets": None, - "error": ERROR_API_LIMITS - if response.status_code == HTTP_TOO_MANY_REQUESTS - else ERROR_GENERIC, - "latest_mention_tweet_id": None, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": reset_ts - if response.status_code == HTTP_TOO_MANY_REQUESTS - else self.synchronized_data.sleep_until, - } - - api_data = json.loads(response.body) - - # Check the meta field - if "meta" not in api_data: - self.context.logger.error( - f"Twitter API response does not contain the required 'meta' field: {api_data!r}" - ) - return { - "tweets": None, - "error": ERROR_GENERIC, - "latest_mention_tweet_id": None, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": None, # we reset this on a successful request - } - - # Check if there are no more results - if ( - "result_count" in api_data["meta"] - and int(api_data["meta"]["result_count"]) == 0 - ): - break - - # Check that the data exists - if "data" not in api_data or "newest_id" not in api_data["meta"]: - self.context.logger.error( - f"Twitter API response does not contain the required 'meta' field: {api_data!r}" - ) - return { - "tweets": None, - "error": ERROR_GENERIC, - "latest_mention_tweet_id": None, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": None, # we reset this on a successful request - } - - if "includes" not in api_data or "users" not in api_data["includes"]: - self.context.logger.error( - f"Twitter API response does not contain the required 'includes/users' field: {api_data!r}" - ) - return { - "tweets": None, - "error": ERROR_GENERIC, - "latest_mention_tweet_id": None, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": None, # we reset this on a successful request - } - - # Add the retrieved tweets - for tweet in api_data["data"]: - # Skip retweets - if tweet["text"].startswith(RETWEET_START): - continue - - tweets[tweet["id"]] = tweet - - # Set the author handle - for user in api_data["includes"]["users"]: - if user["id"] == tweet["author_id"]: - tweets[tweet["id"]]["username"] = user["username"] - break - number_of_tweets_pulled_today += 1 - latest_tweet_id = int(api_data["meta"]["newest_id"]) - - if "next_token" in api_data["meta"]: - next_token = api_data["meta"]["next_token"] - continue - - break - - self.context.logger.info( - f"Got {len(tweets)} new mentions until tweet_id={latest_tweet_id}: {tweets.keys()}" - ) - - return { - "tweets": tweets, - "latest_mention_tweet_id": latest_tweet_id, - "number_of_tweets_pulled_today": number_of_tweets_pulled_today, - "sleep_until": None, # we reset this on a successful request - } - - -class TwitterHashtagsCollectionBehaviour(TwitterScoringBaseBehaviour): - """TwitterHashtagsCollectionBehaviour""" - - matching_round: Type[AbstractRound] = TwitterHashtagsCollectionRound - def _i_am_not_sending(self) -> bool: """Indicates if the current agent is one of the sender or not.""" return ( @@ -576,20 +360,6 @@ def _i_am_not_sending(self) -> bool: not in self.synchronized_data.most_voted_keeper_addresses ) - def async_act(self) -> Generator[None, None, None]: - """ - Do the action. - - Steps: - - If the agent is the keeper, then prepare the transaction and send it. - - Otherwise, wait until the next round. - - If a timeout is hit, set exit A event, otherwise set done event. 
- """ - if self._i_am_not_sending(): - yield from self._not_sender_act() - else: - yield from self._sender_act() - def _not_sender_act(self) -> Generator: """Do the non-sender action.""" with self.context.benchmark_tool.measure(self.behaviour_id).consensus(): @@ -624,13 +394,13 @@ def _sender_act(self) -> Generator: else: # Get hashtags from Twitter - payload_data = yield from self._get_twitter_hashtag_search( + payload_data = yield from self._get_recent_tweets( number_of_tweets_pulled_today=number_of_tweets_pulled_today ) payload_data["last_tweet_pull_window_reset"] = last_tweet_pull_window_reset sender = self.context.agent_address - payload = TwitterHashtagsCollectionPayload( + payload = TweetCollectionPayload( sender=sender, content=json.dumps(payload_data, sort_keys=True) ) @@ -640,14 +410,13 @@ def _sender_act(self) -> Generator: self.set_done() - def _get_twitter_hashtag_search( + def _get_recent_tweets( self, number_of_tweets_pulled_today: int, ) -> Generator[None, None, Dict]: """Get registrations from Twitter""" - api_base = self.params.twitter_api_base - api_endpoint = self.params.twitter_search_endpoint + # Read the latest tweet id from the previous period try: latest_hashtag_tweet_id = int( self.context.ceramic_db["module_data"]["twitter"][ @@ -657,162 +426,41 @@ def _get_twitter_hashtag_search( except KeyError: latest_hashtag_tweet_id = 0 - number_of_tweets_remaining_today = ( - self.params.max_tweet_pulls_allowed - number_of_tweets_pulled_today + next_tweet_id = ( + int(latest_hashtag_tweet_id) + 1 if int(latest_hashtag_tweet_id) != 0 else 0 ) - if number_of_tweets_remaining_today <= 0: - self.context.logger.info( - "Cannot retrieve hashtag mentions, max number of tweets reached for today" - ) + + recent_tweet_query = self.params.recent_tweet_query + self.context.logger.info( + f"Searching recent tweets since id={next_tweet_id}. Query={recent_tweet_query}" + ) + + # Call Tweepy conection + response = yield from self._call_tweepy( + action="search_recent_tweets", + kwargs={ + "query": recent_tweet_query, + "since_id": next_tweet_id, + "max_results": self.params.twitter_max_recent_results, + }, + ) + # Check response + if "error" in response: return { "tweets": None, - "error": ERROR_API_LIMITS, + "error": ERROR_TWEEPY_CONNECTION, "latest_mention_tweet_id": None, "number_of_tweets_pulled_today": number_of_tweets_pulled_today, "sleep_until": self.synchronized_data.sleep_until, } - next_tweet_id = ( - int(latest_hashtag_tweet_id) + 1 if int(latest_hashtag_tweet_id) != 0 else 0 - ) - api_args = self.params.twitter_search_args.replace( - "{since_id}", str(next_tweet_id) - ) - api_args = api_args.replace( - "{max_results}", str(number_of_tweets_remaining_today) - ) - api_url = api_base + api_endpoint + api_args - headers = dict(Authorization=f"Bearer {self.params.twitter_api_bearer_token}") - - self.context.logger.info(f"Retrieving hashtags from Twitter API [{api_url}]") - - next_token = None - latest_tweet_id = None - retrieved_tweets = 0 - tweets = {} - # Pagination loop: we read a max of pages each period - # Each page contains 100 tweets. The default value for twitter_max_pages is 10 - for _ in range(self.params.twitter_max_pages): - self.context.logger.info( - f"Retrieving a new page. 
max_pages={self.params.twitter_max_pages}"
-            )
-
-            url = api_url
-
-            # Add the pagination token if it exists
-            if next_token:
-                url += f"&pagination_token={next_token}"
-
-            # Make the request
-            response = yield from self.get_http_response(
-                method="GET", url=url, headers=headers
-            )
-
-            # Check response status
-            if response.status_code != 200:
-                header_dict = extract_headers(response.headers)
-
-                remaining, limit, reset_ts = [
-                    header_dict.get(header, "?")
-                    for header in [
-                        "x-rate-limit-remaining",
-                        "x-rate-limit-limit",
-                        "x-rate-limit-reset",
-                    ]
-                ]
-                reset = (
-                    datetime.fromtimestamp(int(reset_ts)).strftime("%Y-%m-%d %H:%M:%S")
-                    if reset_ts != "?"
-                    else None
-                )
-
-                self.context.logger.error(
-                    f"Error retrieving hashtags from Twitter [{response.status_code}]: {response.body}"
-                    f"API limits: {remaining}/{limit}. Window reset: {reset}"
-                )
-
-                return {
-                    "tweets": None,
-                    "error": ERROR_API_LIMITS
-                    if response.status_code == HTTP_TOO_MANY_REQUESTS
-                    else ERROR_GENERIC,
-                    "latest_mention_tweet_id": None,
-                    "number_of_tweets_pulled_today": number_of_tweets_pulled_today,
-                    "sleep_until": reset_ts
-                    if response.status_code == HTTP_TOO_MANY_REQUESTS
-                    else self.synchronized_data.sleep_until,
-                }
-
-            api_data = json.loads(response.body)
-
-            # Check the meta field
-            if "meta" not in api_data:
-                self.context.logger.error(
-                    f"Twitter API response does not contain the required 'meta' field: {api_data!r}"
-                )
-                return {
-                    "tweets": None,
-                    "error": ERROR_GENERIC,
-                    "latest_mention_tweet_id": None,
-                    "number_of_tweets_pulled_today": number_of_tweets_pulled_today,
-                    "sleep_until": None,  # we reset this on a successful request
-                }
-
-            # Check if there are no more results
-            if (
-                "result_count" in api_data["meta"]
-                and int(api_data["meta"]["result_count"]) == 0
-            ):
-                break
-
-            # Check that the data exists
-            if "data" not in api_data or "newest_id" not in api_data["meta"]:
-                self.context.logger.error(
-                    f"Twitter API response does not contain the required 'meta' field: {api_data!r}"
-                )
-                return {
-                    "tweets": None,
-                    "error": ERROR_GENERIC,
-                    "latest_mention_tweet_id": None,
-                    "number_of_tweets_pulled_today": number_of_tweets_pulled_today,
-                    "sleep_until": None,  # we reset this on a successful request
-                }
-
-            if "includes" not in api_data or "users" not in api_data["includes"]:
-                self.context.logger.error(
-                    f"Twitter API response does not contain the required 'includes/users' field: {api_data!r}"
-                )
-                return {
-                    "tweets": None,
-                    "error": ERROR_GENERIC,
-                    "latest_mention_tweet_id": None,
-                    "number_of_tweets_pulled_today": number_of_tweets_pulled_today,
-                    "sleep_until": None,  # we reset this on a successful request
-                }
-
-            # Add the retrieved tweets
-            for tweet in api_data["data"]:
-                # Skip retweets
-                if tweet["text"].startswith(RETWEET_START):
-                    continue
-
-                retrieved_tweets += 1
-                if tweet["id"] not in tweets:  # avoids duplicated tweets
-                    tweets[tweet["id"]] = tweet
-
-                    # Set the author handle
-                    for user in api_data["includes"]["users"]:
-                        if user["id"] == tweet["author_id"]:
-                            tweets[tweet["id"]]["username"] = user["username"]
-                            break
-                    number_of_tweets_pulled_today += 1
-            latest_tweet_id = int(api_data["meta"]["newest_id"])
-
-            if "next_token" in api_data["meta"]:
-                next_token = api_data["meta"]["next_token"]
-                continue
-
-            break
+        # Process tweets
+        tweets = {t["id"]: t for t in response["tweets"]}
+        retrieved_tweets = len(response["tweets"])
+        number_of_tweets_pulled_today += retrieved_tweets
+        latest_tweet_id = response["tweets"][
+            0
+        ]["id"]  # the connection returns plain dicts; tweepy sorts by most recent first by 
default self.context.logger.info( f"Got {retrieved_tweets} new hashtag tweets until tweet_id={latest_tweet_id}: {tweets.keys()}" diff --git a/packages/valory/skills/twitter_scoring_abci/fsm_specification.yaml b/packages/valory/skills/twitter_scoring_abci/fsm_specification.yaml index d5d6dffd..2ce6b617 100644 --- a/packages/valory/skills/twitter_scoring_abci/fsm_specification.yaml +++ b/packages/valory/skills/twitter_scoring_abci/fsm_specification.yaml @@ -7,11 +7,10 @@ alphabet_in: - NO_MAJORITY - POST_MECH - PRE_MECH -- RETRIEVE_HASHTAGS -- RETRIEVE_MENTIONS - ROUND_TIMEOUT - SELECT_KEEPERS - SKIP_EVALUATION +- TWEET_COLLECTION default_start_state: TwitterDecisionMakingRound final_states: - FinishedTwitterCollectionRound @@ -25,9 +24,8 @@ states: - FinishedTwitterScoringRound - PostMechRequestRound - PreMechRequestRound +- TweetCollectionRound - TwitterDecisionMakingRound -- TwitterHashtagsCollectionRound -- TwitterMentionsCollectionRound - TwitterRandomnessRound - TwitterSelectKeepersRound transition_func: @@ -41,27 +39,20 @@ transition_func: (PreMechRequestRound, NO_MAJORITY): PreMechRequestRound (PreMechRequestRound, ROUND_TIMEOUT): PreMechRequestRound (PreMechRequestRound, SKIP_EVALUATION): FinishedTwitterScoringRound + (TweetCollectionRound, API_ERROR): TweetCollectionRound + (TweetCollectionRound, DONE): TwitterDecisionMakingRound + (TweetCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound + (TweetCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound + (TweetCollectionRound, NO_MAJORITY): TwitterRandomnessRound + (TweetCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound (TwitterDecisionMakingRound, DB_UPDATE): DBUpdateRound (TwitterDecisionMakingRound, DONE): FinishedTwitterScoringRound (TwitterDecisionMakingRound, NO_MAJORITY): TwitterDecisionMakingRound (TwitterDecisionMakingRound, POST_MECH): PostMechRequestRound (TwitterDecisionMakingRound, PRE_MECH): PreMechRequestRound - (TwitterDecisionMakingRound, RETRIEVE_HASHTAGS): TwitterHashtagsCollectionRound - (TwitterDecisionMakingRound, RETRIEVE_MENTIONS): TwitterMentionsCollectionRound (TwitterDecisionMakingRound, ROUND_TIMEOUT): TwitterDecisionMakingRound (TwitterDecisionMakingRound, SELECT_KEEPERS): TwitterRandomnessRound - (TwitterHashtagsCollectionRound, API_ERROR): TwitterHashtagsCollectionRound - (TwitterHashtagsCollectionRound, DONE): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound - (TwitterHashtagsCollectionRound, NO_MAJORITY): TwitterRandomnessRound - (TwitterHashtagsCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound - (TwitterMentionsCollectionRound, API_ERROR): TwitterMentionsCollectionRound - (TwitterMentionsCollectionRound, DONE): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, DONE_API_LIMITS): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, DONE_MAX_RETRIES): TwitterDecisionMakingRound - (TwitterMentionsCollectionRound, NO_MAJORITY): TwitterRandomnessRound - (TwitterMentionsCollectionRound, ROUND_TIMEOUT): TwitterRandomnessRound + (TwitterDecisionMakingRound, TWEET_COLLECTION): TweetCollectionRound (TwitterRandomnessRound, DONE): TwitterSelectKeepersRound (TwitterRandomnessRound, NO_MAJORITY): TwitterRandomnessRound (TwitterRandomnessRound, ROUND_TIMEOUT): TwitterRandomnessRound diff --git a/packages/valory/skills/twitter_scoring_abci/models.py b/packages/valory/skills/twitter_scoring_abci/models.py index fcce43d3..aa7c8917 
100644 --- a/packages/valory/skills/twitter_scoring_abci/models.py +++ b/packages/valory/skills/twitter_scoring_abci/models.py @@ -105,6 +105,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: openai_call_window_size=self.openai_call_window_size, openai_calls_allowed_in_window=self.openai_calls_allowed_in_window, ) + self.recent_tweet_query = kwargs.get("recent_tweet_query") + self.twitter_max_recent_results = kwargs.get("twitter_max_recent_results") super().__init__(*args, **kwargs) diff --git a/packages/valory/skills/twitter_scoring_abci/payloads.py b/packages/valory/skills/twitter_scoring_abci/payloads.py index a9c4c6e6..63876706 100644 --- a/packages/valory/skills/twitter_scoring_abci/payloads.py +++ b/packages/valory/skills/twitter_scoring_abci/payloads.py @@ -25,15 +25,8 @@ @dataclass(frozen=True) -class TwitterMentionsCollectionPayload(BaseTxPayload): - """Represent a transaction payload for the TwitterMentionsCollectionRound.""" - - content: str - - -@dataclass(frozen=True) -class TwitterHashtagsCollectionPayload(BaseTxPayload): - """Represent a transaction payload for the TwitterHashtagsCollectionRound.""" +class TweetCollectionPayload(BaseTxPayload): + """Represent a transaction payload for the TweetCollectionRound.""" content: str diff --git a/packages/valory/skills/twitter_scoring_abci/rounds.py b/packages/valory/skills/twitter_scoring_abci/rounds.py index b61e3bc0..c3d703f5 100644 --- a/packages/valory/skills/twitter_scoring_abci/rounds.py +++ b/packages/valory/skills/twitter_scoring_abci/rounds.py @@ -40,17 +40,16 @@ DBUpdatePayload, PostMechRequestPayload, PreMechRequestPayload, + TweetCollectionPayload, TwitterDecisionMakingPayload, - TwitterHashtagsCollectionPayload, - TwitterMentionsCollectionPayload, TwitterRandomnessPayload, TwitterSelectKeepersPayload, ) MAX_API_RETRIES = 2 -ERROR_GENERIC = "generic" -ERROR_API_LIMITS = "too many requests" +ERROR_TWEEPY_CONNECTION = "tweepy connection" +ERROR_API_LIMITS = "too many requests or allowace per day reached" class DataclassEncoder(json.JSONEncoder): @@ -107,8 +106,7 @@ class Event(Enum): ROUND_TIMEOUT = "round_timeout" TWEET_EVALUATION_ROUND_TIMEOUT = "tweet_evaluation_round_timeout" API_ERROR = "api_error" - RETRIEVE_HASHTAGS = "retrieve_hashtags" - RETRIEVE_MENTIONS = "retrieve_mentions" + TWEET_COLLECTION = "tweet_collection" PRE_MECH = "pre_mech" POST_MECH = "post_mech" DB_UPDATE = "db_update" @@ -204,7 +202,7 @@ def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: if self.threshold_reached: event = Event(self.most_voted_payload) # Reference events to avoid tox -e check-abciapp-specs failures - # Event.DONE, Event.DB_UPDATE, Event.RETRIEVE_MENTIONS, Event.RETRIEVE_HASHTAGS, Event.SELECT_KEEPERS + # Event.DONE, Event.DB_UPDATE, Event.TWEET_COLLECTION, Event.SELECT_KEEPERS # Event.POST_MECH, Event.PRE_MECH return self.synchronized_data, event if not self.is_majority_possible( @@ -214,148 +212,10 @@ def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: return None -class TwitterMentionsCollectionRound(CollectSameUntilThresholdRound): - """TwitterMentionsCollectionRound""" +class TweetCollectionRound(CollectSameUntilThresholdRound): + """TweetCollectionRound""" - payload_class = TwitterMentionsCollectionPayload - synchronized_data_class = SynchronizedData - - @property - def consensus_threshold(self): - """Consensus threshold""" - return math.ceil(self.synchronized_data.nb_participants / 2) # half or 1 - - @property - def threshold_reached( - self, - ) -> bool: - """Check if the 
threshold has been reached.""" - counts = self.payload_values_count.values() - return any(count >= self.consensus_threshold for count in counts) - - @property - def most_voted_payload_values( - self, - ) -> Tuple[Any, ...]: - """Get the most voted payload values.""" - most_voted_payload_values, max_votes = self.payload_values_count.most_common()[ - 0 - ] - if max_votes < self.consensus_threshold: - raise ABCIAppInternalError("not enough votes") - return most_voted_payload_values - - def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: - """Process the end of the block.""" - if self.threshold_reached: - performed_twitter_tasks = cast( - SynchronizedData, self.synchronized_data - ).performed_twitter_tasks - - payload = json.loads(self.most_voted_payload) - - # API error - if "error" in payload: - # API limits - if payload["error"] == ERROR_API_LIMITS: - performed_twitter_tasks[ - "retrieve_mentions" - ] = Event.DONE_MAX_RETRIES.value - - synchronized_data = self.synchronized_data.update( - synchronized_data_class=SynchronizedData, - **{ - get_name(SynchronizedData.sleep_until): payload[ - "sleep_until" - ], - get_name( - SynchronizedData.performed_twitter_tasks - ): performed_twitter_tasks, - }, - ) - return synchronized_data, Event.DONE_API_LIMITS - - api_retries = ( - cast(SynchronizedData, self.synchronized_data).api_retries + 1 - ) - - # Other API errors - if api_retries >= MAX_API_RETRIES: - performed_twitter_tasks[ - "retrieve_mentions" - ] = Event.DONE_MAX_RETRIES.value - synchronized_data = self.synchronized_data.update( - synchronized_data_class=SynchronizedData, - **{ - get_name(SynchronizedData.api_retries): 0, # reset retries - get_name( - SynchronizedData.performed_twitter_tasks - ): performed_twitter_tasks, - get_name(SynchronizedData.sleep_until): payload[ - "sleep_until" - ], - }, - ) - return synchronized_data, Event.DONE_MAX_RETRIES - - synchronized_data = self.synchronized_data.update( - synchronized_data_class=SynchronizedData, - **{ - get_name(SynchronizedData.api_retries): api_retries, - get_name(SynchronizedData.sleep_until): payload["sleep_until"], - }, - ) - return synchronized_data, Event.API_ERROR - - # Happy path - previous_tweets = cast(SynchronizedData, self.synchronized_data).tweets - performed_twitter_tasks["retrieve_mentions"] = Event.DONE.value - new_tweets = payload["tweets"] - - updates = { - get_name(SynchronizedData.tweets): { - **new_tweets, - **previous_tweets, - }, # order matters here: if there is duplication, keep old tweets - get_name(SynchronizedData.number_of_tweets_pulled_today): payload[ - "number_of_tweets_pulled_today" - ], - get_name(SynchronizedData.last_tweet_pull_window_reset): payload[ - "last_tweet_pull_window_reset" - ], - get_name( - SynchronizedData.performed_twitter_tasks - ): performed_twitter_tasks, - get_name(SynchronizedData.sleep_until): payload["sleep_until"], - } - - if payload["latest_mention_tweet_id"]: - updates[get_name(SynchronizedData.latest_mention_tweet_id)] = payload[ - "latest_mention_tweet_id" - ] - else: - updates[ - get_name(SynchronizedData.latest_mention_tweet_id) - ] = self.context.ceramic_db["module_data"]["twitter"][ - "latest_mention_tweet_id" - ] - - synchronized_data = self.synchronized_data.update( - synchronized_data_class=SynchronizedData, - **updates, - ) - return synchronized_data, Event.DONE - if not self.is_majority_possible( - self.collection, self.synchronized_data.nb_participants - ): - return self.synchronized_data, Event.NO_MAJORITY - return None - - -class 
TwitterHashtagsCollectionRound(CollectSameUntilThresholdRound): - """TwitterHashtagsCollectionRound""" - - payload_class = TwitterHashtagsCollectionPayload + payload_class = TweetCollectionPayload synchronized_data_class = SynchronizedData @property @@ -397,7 +257,7 @@ def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: # API limits if payload["error"] == ERROR_API_LIMITS: performed_twitter_tasks[ - "retrieve_hashtags" + "tweet_collection" ] = Event.DONE_MAX_RETRIES.value synchronized_data = self.synchronized_data.update( @@ -417,10 +277,10 @@ def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: cast(SynchronizedData, self.synchronized_data).api_retries + 1 ) - # Other API errors + # Other errors like tweepy connection error if api_retries >= MAX_API_RETRIES: performed_twitter_tasks[ - "retrieve_hashtags" + "tweet_collection" ] = Event.DONE_MAX_RETRIES.value synchronized_data = self.synchronized_data.update( synchronized_data_class=SynchronizedData, @@ -447,7 +307,7 @@ def end_block(self) -> Optional[Tuple[BaseSynchronizedData, Event]]: # Happy path previous_tweets = cast(SynchronizedData, self.synchronized_data).tweets - performed_twitter_tasks["retrieve_hashtags"] = Event.DONE.value + performed_twitter_tasks["tweet_collection"] = Event.DONE.value new_tweets = payload["tweets"] updates = { @@ -697,8 +557,7 @@ class TwitterScoringAbciApp(AbciApp[Event]): transition_function: AbciAppTransitionFunction = { TwitterDecisionMakingRound: { Event.SELECT_KEEPERS: TwitterRandomnessRound, - Event.RETRIEVE_HASHTAGS: TwitterHashtagsCollectionRound, - Event.RETRIEVE_MENTIONS: TwitterMentionsCollectionRound, + Event.TWEET_COLLECTION: TweetCollectionRound, Event.PRE_MECH: PreMechRequestRound, Event.POST_MECH: PostMechRequestRound, Event.DB_UPDATE: DBUpdateRound, @@ -716,19 +575,11 @@ class TwitterScoringAbciApp(AbciApp[Event]): Event.NO_MAJORITY: TwitterRandomnessRound, Event.ROUND_TIMEOUT: TwitterRandomnessRound, }, - TwitterMentionsCollectionRound: { - Event.DONE: TwitterDecisionMakingRound, - Event.DONE_MAX_RETRIES: TwitterDecisionMakingRound, - Event.DONE_API_LIMITS: TwitterDecisionMakingRound, - Event.API_ERROR: TwitterMentionsCollectionRound, - Event.NO_MAJORITY: TwitterRandomnessRound, - Event.ROUND_TIMEOUT: TwitterRandomnessRound, - }, - TwitterHashtagsCollectionRound: { + TweetCollectionRound: { Event.DONE: TwitterDecisionMakingRound, Event.DONE_MAX_RETRIES: TwitterDecisionMakingRound, Event.DONE_API_LIMITS: TwitterDecisionMakingRound, - Event.API_ERROR: TwitterHashtagsCollectionRound, + Event.API_ERROR: TweetCollectionRound, Event.NO_MAJORITY: TwitterRandomnessRound, Event.ROUND_TIMEOUT: TwitterRandomnessRound, }, diff --git a/packages/valory/skills/twitter_scoring_abci/skill.yaml b/packages/valory/skills/twitter_scoring_abci/skill.yaml index 9b6f043c..c22d466c 100644 --- a/packages/valory/skills/twitter_scoring_abci/skill.yaml +++ b/packages/valory/skills/twitter_scoring_abci/skill.yaml @@ -130,14 +130,7 @@ models: tendermint_p2p_url: localhost:26656 tendermint_url: http://localhost:26657 tx_timeout: 10.0 - twitter_api_base: https://api.twitter.com/ - twitter_api_bearer_token: - twitter_mentions_endpoint: 2/users/1450081635559428107/mentions? - twitter_mentions_args: tweet.fields=author_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id} - twitter_max_pages: 1 max_tweet_pulls_allowed: 80 - twitter_search_endpoint: 2/tweets/search/recent? 
- twitter_search_args: query=%23olas&tweet.fields=author_id,created_at,conversation_id&user.fields=name&expansions=author_id&max_results={max_results}&since_id={since_id} openai_call_window_size: 3600.0 openai_calls_allowed_in_window: 100 max_points_per_period: 900 @@ -148,6 +141,8 @@ models: slash_threshold_amount: 10000000000000000 light_slash_unit_amount: 5000000000000000 serious_slash_unit_amount: 8000000000000000 + recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply' + twitter_max_recent_results: 10 class_name: Params randomness_api: args: From 0843fbf1f697de258281af4ea7759d56ce482943 Mon Sep 17 00:00:00 2001 From: David Vilela Date: Wed, 12 Jun 2024 17:14:24 +0200 Subject: [PATCH 2/4] fix: generators --- docs/index.md | 2 +- packages/packages.json | 13 ++++++------ .../agents/impact_evaluator/aea-config.yaml | 8 +++---- .../valory/connections/tweepy/connection.py | 21 +++++++++++++++++++ .../valory/connections/tweepy/connection.yaml | 6 +++--- .../services/impact_evaluator/service.yaml | 10 ++++----- .../impact_evaluator_local/service.yaml | 4 ++-- .../skills/impact_evaluator_abci/skill.yaml | 9 ++++---- .../skills/twitter_scoring_abci/payloads.py | 2 +- .../skills/twitter_scoring_abci/skill.yaml | 13 ++++++------ 10 files changed, 56 insertions(+), 32 deletions(-) diff --git a/docs/index.md b/docs/index.md index 0e01077a..5a625d8a 100644 --- a/docs/index.md +++ b/docs/index.md @@ -31,7 +31,7 @@ In order to run a local demo service based on the IEKit: 2. Fetch the IEKit. ```bash - autonomy fetch valory/impact_evaluator:0.1.0:bafybeieqamv6zsqecx3i7hp6hf4ahfikpzfdnx2c4cdzr7jmzbezxvrrli --service + autonomy fetch valory/impact_evaluator:0.1.0:bafybeic6wdlxvrppf2e4xkhstwh4vvitptoarx6avjcppru7cgny6wkfye --service ``` 3. 
Build the Docker image of the service agents diff --git a/packages/packages.json b/packages/packages.json index 361299fe..d1aa9734 100644 --- a/packages/packages.json +++ b/packages/packages.json @@ -12,23 +12,24 @@ "connection/valory/twitter/0.1.0": "bafybeidcalgczvpoq6l2uh7kkfblwxmytm6bicmsxub7pevu2k46sp7mky", "connection/valory/openai/0.1.0": "bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a", "connection/valory/farcaster/0.1.0": "bafybeibbdas7lxbipksodaphjms3uop7vnzjqkroktjq2g6wbvgtlldaxi", + "connection/valory/tweepy/0.1.0": "bafybeidantmqvkut7fpuii45ewdfxb5zy3pwdndposr5hmzwicfzjf4cpa", "skill/valory/dynamic_nft_abci/0.1.0": "bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by", - "skill/valory/twitter_scoring_abci/0.1.0": "bafybeiauqp3cbs57fjevnkwkrzwpbtrjwmtncmhrpet7kshuvr3hryzigu", + "skill/valory/twitter_scoring_abci/0.1.0": "bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi", "skill/valory/ceramic_read_abci/0.1.0": "bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi", "skill/valory/ceramic_write_abci/0.1.0": "bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe", - "skill/valory/impact_evaluator_abci/0.1.0": "bafybeid356pzvlwtkudfr2h7smrgq7doqtm4bi4d3r4xb37s22gjq5cufu", + "skill/valory/impact_evaluator_abci/0.1.0": "bafybeicyha3z6jjsow3heeckem2yrxq652hwc3jh4iotu5ewf65wovnrsm", "skill/valory/generic_scoring_abci/0.1.0": "bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm", "skill/valory/twitter_write_abci/0.1.0": "bafybeiaofknda4gzs6xks53ta5xwhgs2dvyvfqypdk2ey7if6egjpgarna", "skill/valory/llm_abci/0.1.0": "bafybeicljcawuikys472glztza4mfj7ccl2eqxcbptysl7tdexik6w6zvm", "skill/valory/decision_making_abci/0.1.0": "bafybeih7uhlaqfr6mxaiiwujk27afwiml3reia3af53z6c3fioaehvqbsq", - "skill/valory/olas_week_abci/0.1.0": "bafybeigsx6qsinvq2dgcz5hpodyip75vm6qpjmgrhtf5ahxyvscqg3quia", + "skill/valory/olas_week_abci/0.1.0": "bafybeiayeqzb3u6e4culfuzenkhtwlgnzttpvahdeqlekfdottm2rmrj4i", "skill/valory/mech_interact_abci/0.1.0": "bafybeicj4c6brjtijdqywn3knejwl4o5ynzi4jcc4sinezeyj7lbgxbu5q", "skill/valory/farcaster_write_abci/0.1.0": "bafybeibqepsbwfbrlpp6lqauyb2zlapo5j35omuhcwv4s2cmnfl7ujmcc4", "skill/valory/farcaster_test_abci/0.1.0": "bafybeibgcz7cxdrkkbzwsfn7webtktxh55m63umlb3brh2wx7qwckbmlua", - "agent/valory/impact_evaluator/0.1.0": "bafybeifaq3zorj63ujyy6oqqbsy4gufkbtnosmrjrriofk75p5dqe6ah6e", + "agent/valory/impact_evaluator/0.1.0": "bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe", "agent/valory/farcaster_test/0.1.0": "bafybeifcwix6xwyee5wo37jjfazfshsf3h3srmspple4mruyyqz2dsimf4", - "service/valory/impact_evaluator/0.1.0": "bafybeieqamv6zsqecx3i7hp6hf4ahfikpzfdnx2c4cdzr7jmzbezxvrrli", - "service/valory/impact_evaluator_local/0.1.0": "bafybeiat6z3dwtdqv3kj2afvqpwco3eglnvziiedhhwmgjcsxltzypspwi" + "service/valory/impact_evaluator/0.1.0": "bafybeic6wdlxvrppf2e4xkhstwh4vvitptoarx6avjcppru7cgny6wkfye", + "service/valory/impact_evaluator_local/0.1.0": "bafybeidt4rlis4cnemim4livkjitiy5o72bsiheh2idq4hvdtyu4qsyv5a" }, "third_party": { "protocol/open_aea/signing/1.0.0": "bafybeihv62fim3wl2bayavfcg3u5e5cxu3b7brtu4cn5xoxd6lqwachasi", diff --git a/packages/valory/agents/impact_evaluator/aea-config.yaml b/packages/valory/agents/impact_evaluator/aea-config.yaml index f904fec3..779bab08 100644 --- a/packages/valory/agents/impact_evaluator/aea-config.yaml +++ b/packages/valory/agents/impact_evaluator/aea-config.yaml @@ -43,9 +43,9 @@ protocols: skills: - valory/abstract_abci:0.1.0:bafybeibzxex6md6xp5eibvp23cmkk7xgdyd44r5lxhfhlquwe3nb22ujgy - 
valory/abstract_round_abci:0.1.0:bafybeih2soag6gy472x7vbbxmetmr532jqzm2efcapymi3725as33fvjvu -- valory/impact_evaluator_abci:0.1.0:bafybeid356pzvlwtkudfr2h7smrgq7doqtm4bi4d3r4xb37s22gjq5cufu +- valory/impact_evaluator_abci:0.1.0:bafybeicyha3z6jjsow3heeckem2yrxq652hwc3jh4iotu5ewf65wovnrsm - valory/generic_scoring_abci:0.1.0:bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm -- valory/twitter_scoring_abci:0.1.0:bafybeiauqp3cbs57fjevnkwkrzwpbtrjwmtncmhrpet7kshuvr3hryzigu +- valory/twitter_scoring_abci:0.1.0:bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi - valory/ceramic_read_abci:0.1.0:bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi - valory/ceramic_write_abci:0.1.0:bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe - valory/dynamic_nft_abci:0.1.0:bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by @@ -56,7 +56,7 @@ skills: - valory/twitter_write_abci:0.1.0:bafybeiaofknda4gzs6xks53ta5xwhgs2dvyvfqypdk2ey7if6egjpgarna - valory/decision_making_abci:0.1.0:bafybeih7uhlaqfr6mxaiiwujk27afwiml3reia3af53z6c3fioaehvqbsq - valory/llm_abci:0.1.0:bafybeicljcawuikys472glztza4mfj7ccl2eqxcbptysl7tdexik6w6zvm -- valory/olas_week_abci:0.1.0:bafybeigsx6qsinvq2dgcz5hpodyip75vm6qpjmgrhtf5ahxyvscqg3quia +- valory/olas_week_abci:0.1.0:bafybeiayeqzb3u6e4culfuzenkhtwlgnzttpvahdeqlekfdottm2rmrj4i - valory/mech_interact_abci:0.1.0:bafybeicj4c6brjtijdqywn3knejwl4o5ynzi4jcc4sinezeyj7lbgxbu5q default_ledger: ethereum required_ledgers: @@ -251,7 +251,7 @@ models: termination_from_block: ${int:0} mech_chain_id: ${str:gnosis} mech_interaction_sleep_time: ${int:10} - recent_tweet_query: ${str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply} + recent_tweet_query: ${str:@autonolas OR twitter_max_recent_results: ${int:10} randomness_api: args: diff --git a/packages/valory/connections/tweepy/connection.py b/packages/valory/connections/tweepy/connection.py index cf2b8d8d..05f6affd 100644 --- a/packages/valory/connections/tweepy/connection.py +++ b/packages/valory/connections/tweepy/connection.py @@ -1,3 +1,24 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# ------------------------------------------------------------------------------ +# +# Copyright 2021-2024 Valory AG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ------------------------------------------------------------------------------ + + #!/usr/bin/env python3 # -*- coding: utf-8 -*- # ------------------------------------------------------------------------------ diff --git a/packages/valory/connections/tweepy/connection.yaml b/packages/valory/connections/tweepy/connection.yaml index b4616c41..ce9b6fa4 100644 --- a/packages/valory/connections/tweepy/connection.yaml +++ b/packages/valory/connections/tweepy/connection.yaml @@ -6,9 +6,9 @@ description: The Tweepy connection provides a wrapper around the Tweepy library. 
license: Apache-2.0
 aea_version: '>=1.0.0, <2.0.0'
 fingerprint:
- __init__.py: bafybeifydrb4yumno6ph2nqjetqw3bseccgso4cjfarsedy4r5f73zl72m
- connection.py: bafybeiexi2m6eu4qkgfeafzbnecearz4puojdaavvtlvqcm67qrgbq4i3i
- readme.md: bafybeiaulo2wb7znrotpnsh27idv2j3rlmslene7l3ygedjmshyna6tkxu
+ __init__.py: bafybeicrwqrdownfmeyvvzu45fllxouiwvtynrzs5fhbpt3wndyyhn66eu
+ connection.py: bafybeieprt2qojuunh72rxd3fm3r3tom7uqxvak3llc7xbmqt65eq6n4ey
+ readme.md: bafybeib5oflnp3gymrottersu6qrnitjmaifl2gvbvjq7kbmsdbihhzfaa
 fingerprint_ignore_patterns: []
 connections: []
 protocols:
diff --git a/packages/valory/services/impact_evaluator/service.yaml b/packages/valory/services/impact_evaluator/service.yaml
index bf0eb5b1..826a6764 100644
--- a/packages/valory/services/impact_evaluator/service.yaml
+++ b/packages/valory/services/impact_evaluator/service.yaml
@@ -8,7 +8,7 @@ license: Apache-2.0
 fingerprint:
 README.md: bafybeign56hilwuoa6bgos3uqabss4gew4vadkik7vhj3ucpqw6nxtqtpe
 fingerprint_ignore_patterns: []
-agent: valory/impact_evaluator:0.1.0:bafybeifaq3zorj63ujyy6oqqbsy4gufkbtnosmrjrriofk75p5dqe6ah6e
+agent: valory/impact_evaluator:0.1.0:bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe
 number_of_agents: 4
 deployment:
 agent:
@@ -132,7 +132,7 @@ extra:
 termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
 mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
 mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
- recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+ recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
 twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
 randomness_api:
 args:
@@ -212,7 +212,7 @@ extra:
 termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
 mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
 mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
- recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+ recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
 twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
 randomness_api:
 args:
@@ -292,7 +292,7 @@ extra:
 termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
 mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
 mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
- recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+ recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
 twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
 randomness_api:
 args:
@@ -376,7 +376,7 @@ extra:
 termination_from_block: ${TERMINATION_FROM_BLOCK:int:19717045}
 mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
 mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
- recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+ recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
 twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
 randomness_api:
 args:
diff --git a/packages/valory/services/impact_evaluator_local/service.yaml b/packages/valory/services/impact_evaluator_local/service.yaml
index df7d3174..87913ba9 100644
--- a/packages/valory/services/impact_evaluator_local/service.yaml
+++ b/packages/valory/services/impact_evaluator_local/service.yaml
@@ -8,7 +8,7 @@ license: Apache-2.0
 fingerprint:
 README.md: bafybeign56hilwuoa6bgos3uqabss4gew4vadkik7vhj3ucpqw6nxtqtpe
 fingerprint_ignore_patterns: []
-agent: valory/impact_evaluator:0.1.0:bafybeifaq3zorj63ujyy6oqqbsy4gufkbtnosmrjrriofk75p5dqe6ah6e
+agent: valory/impact_evaluator:0.1.0:bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe
 number_of_agents: 1
 deployment:
 agent:
@@ -117,7 +117,7 @@ models:
 termination_from_block: ${TERMINATION_FROM_BLOCK:int:0}
 mech_chain_id: ${MECH_CHAIN_ID:str:gnosis}
 mech_interaction_sleep_time: ${MECH_INTERACTION_SLEEP_TIME:int:10}
- recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
+ recent_tweet_query: ${RECENT_TWEET_QUERY:str:@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply}
 twitter_max_recent_results: ${TWITTER_MAX_RECENT_RESULTS:int:10}
 randomness_api:
 args:
diff --git a/packages/valory/skills/impact_evaluator_abci/skill.yaml b/packages/valory/skills/impact_evaluator_abci/skill.yaml
index fa14cb00..7862ef4a 100644
--- a/packages/valory/skills/impact_evaluator_abci/skill.yaml
+++ b/packages/valory/skills/impact_evaluator_abci/skill.yaml
@@ -10,7 +10,7 @@ fingerprint:
 behaviours.py: bafybeibdq5p4zi47nsycpcjs7ouw2fjxqjn7hgbd4aalp7yumdhnmlfufq
 composition.py: bafybeifwmregxn3ibbln3dfkqpmyl5iznfwzepplkjwduoxramikzv6p34
 dialogues.py: bafybeigjknz4qqynbsltjje46gidg4rftsqw6ybjwegz24wetmycutpzh4
- fsm_specification.yaml: bafybeigxkqdtkr5vldnvd3izkjejtvv5l5fura5dwfljllpoblsc6nty6u
+ fsm_specification.yaml: bafybeibmri3227lxzrdmswvyf52jrar6s5pdupjwbbdtriftyl3pt6pdca
 handlers.py: bafybeidkli6fphcmdgwsys4lkyf3fx6fbawet4nt2pnixfypzijhg6b3ze
 models.py: bafybeif7kujqjk77j7xnnat7lpi7ldgqzh2tp5xhojdjqentfg3s4md6y4
 tests/__init__.py: bafybeievwzwojvq4aofk5kjpf4jzygfes7ew6s6svc6b6frktjnt3sicce
@@ -26,7 +26,7 @@ skills:
- valory/abstract_round_abci:0.1.0:bafybeih2soag6gy472x7vbbxmetmr532jqzm2efcapymi3725as33fvjvu
- valory/ceramic_read_abci:0.1.0:bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi
- valory/generic_scoring_abci:0.1.0:bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm
-- valory/twitter_scoring_abci:0.1.0:bafybeiauqp3cbs57fjevnkwkrzwpbtrjwmtncmhrpet7kshuvr3hryzigu
+- valory/twitter_scoring_abci:0.1.0:bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi
- valory/ceramic_write_abci:0.1.0:bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe
- valory/dynamic_nft_abci:0.1.0:bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by
- valory/registration_abci:0.1.0:bafybeiaxomjnv2h7ozc7mvmz5wysfinfgtqnniznpcdunijb5iiekv4mzi
@@ -36,7 +36,7 @@ skills:
- valory/decision_making_abci:0.1.0:bafybeih7uhlaqfr6mxaiiwujk27afwiml3reia3af53z6c3fioaehvqbsq
- valory/llm_abci:0.1.0:bafybeicljcawuikys472glztza4mfj7ccl2eqxcbptysl7tdexik6w6zvm
- valory/twitter_write_abci:0.1.0:bafybeiaofknda4gzs6xks53ta5xwhgs2dvyvfqypdk2ey7if6egjpgarna
-- valory/olas_week_abci:0.1.0:bafybeigsx6qsinvq2dgcz5hpodyip75vm6qpjmgrhtf5ahxyvscqg3quia
+- valory/olas_week_abci:0.1.0:bafybeiayeqzb3u6e4culfuzenkhtwlgnzttpvahdeqlekfdottm2rmrj4i
- valory/mech_interact_abci:0.1.0:bafybeicj4c6brjtijdqywn3knejwl4o5ynzi4jcc4sinezeyj7lbgxbu5q
 behaviours:
 main:
@@ -190,7 +190,8 @@ models:
 tweet_consensus_veolas: 2000000
 mech_chain_id: gnosis
 mech_interaction_sleep_time: 10
- recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply'
+ recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet
+   -is:reply'
 twitter_max_recent_results: 10
 class_name: Params
 randomness_api:
diff --git a/packages/valory/skills/twitter_scoring_abci/payloads.py b/packages/valory/skills/twitter_scoring_abci/payloads.py
index 63876706..08e695f3 100644
---
a/packages/valory/skills/twitter_scoring_abci/payloads.py +++ b/packages/valory/skills/twitter_scoring_abci/payloads.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # ------------------------------------------------------------------------------ # -# Copyright 2023 Valory AG +# Copyright 2023-2024 Valory AG # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/valory/skills/twitter_scoring_abci/skill.yaml b/packages/valory/skills/twitter_scoring_abci/skill.yaml index c22d466c..d41a5122 100644 --- a/packages/valory/skills/twitter_scoring_abci/skill.yaml +++ b/packages/valory/skills/twitter_scoring_abci/skill.yaml @@ -8,14 +8,14 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeifudgakkjoyahuewp2o4gvqayw7nsgpyxw2ayrpgmzexurh2xomaq - behaviours.py: bafybeifchonz4zghkztye742qtv4xrt63kylidcftfkkf5wbqturcuhzwy + behaviours.py: bafybeib5f265xhosn4flr5vdwrl6rx73nzw4m6iqifqzhhob4enn5kqg4q dialogues.py: bafybeibdqzn37hbo2cq4skww4uh2zvvsjyaxxvdhxisefbdvmjp7rh53si - fsm_specification.yaml: bafybeihbq2olrdqx2x5lcvypxyi44ecog4nntsvw2v6oeab4imvj5nz6ae + fsm_specification.yaml: bafybeihz4bgnyzntjnjdxgrmc33iimpkuf4wyrf7ly4rs34piozksmlbf4 handlers.py: bafybeid3nqvcyotqj5g5hlgrz57nf7vpjysmgvsxe3p7644f4z5dcwqn6u - models.py: bafybeihrwlkjzhxktcmtj4uqzzobukf4mpyt7ba73pcpldk4i76pofaei4 - payloads.py: bafybeidb2qiw26b5fhyxujseibbo2ji5zo54auhyelllsqnj7zn5sbc3pi + models.py: bafybeifgonkpxbuzcazmarak2xvflmjl2qa7lsgcuq46l2dlx6xbuvq4t4 + payloads.py: bafybeiajehgklh2y4gzzosg5xxdxt5r4egkynfs36ma4wd5ul6jvfrbgqa prompts.py: bafybeieiuqn427bgwfnzynxf3vtqfpvmqqscs5tyw4oibfofwropifotke - rounds.py: bafybeibprkxnaneq3gz4amlipnfvxp7d6rzx4lnhcuefc4ewwthxrbasce + rounds.py: bafybeidub7bf4hq37kefsg727rpmxt7ies6zgrytlxz62og2i3waa63jly tests/__init__.py: bafybeidwzzd4ejsyf3aryd5kmrvd63h7ajgqyrxphmfaacvpjnneacejay tests/test_behaviours.py: bafybeigtf3jc65rl4c3p73mbtv667w62yvhkxiivnqb42serqij4rxhx4q tests/test_dialogues.py: bafybeiheyq7klonzb7rnjub2i22h7bmsnoimn2pq4j7ofikt3yovstvgt4 @@ -141,7 +141,8 @@ models: slash_threshold_amount: 10000000000000000 light_slash_unit_amount: 5000000000000000 serious_slash_unit_amount: 8000000000000000 - recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet -is:reply' + recent_tweet_query: '@autonolas OR #OlasNetwork OR #StakeWithPearl -is:retweet + -is:reply' twitter_max_recent_results: 10 class_name: Params randomness_api: From c6de24c692011651d136397477352e9a60b47403 Mon Sep 17 00:00:00 2001 From: David Vilela Date: Wed, 12 Jun 2024 17:23:06 +0200 Subject: [PATCH 3/4] fix: linters --- docs/index.md | 2 +- packages/packages.json | 12 ++++++------ .../valory/agents/impact_evaluator/aea-config.yaml | 4 ++-- packages/valory/connections/tweepy/connection.py | 4 ++-- packages/valory/connections/tweepy/connection.yaml | 4 +++- .../valory/services/impact_evaluator/service.yaml | 2 +- .../services/impact_evaluator_local/service.yaml | 2 +- .../valory/skills/impact_evaluator_abci/skill.yaml | 2 +- .../valory/skills/twitter_scoring_abci/behaviours.py | 5 ++--- .../valory/skills/twitter_scoring_abci/skill.yaml | 2 +- 10 files changed, 20 insertions(+), 19 deletions(-) diff --git a/docs/index.md b/docs/index.md index 5a625d8a..e0f3e14c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -31,7 +31,7 @@ In order to run a local demo service based on the IEKit: 2. Fetch the IEKit. 
```bash - autonomy fetch valory/impact_evaluator:0.1.0:bafybeic6wdlxvrppf2e4xkhstwh4vvitptoarx6avjcppru7cgny6wkfye --service + autonomy fetch valory/impact_evaluator:0.1.0:bafybeicdmqmqulnel2x4k7kgr3hmdnhm7gjfjap23unxhxuk3s3e52guzu --service ``` 3. Build the Docker image of the service agents diff --git a/packages/packages.json b/packages/packages.json index d1aa9734..acd61239 100644 --- a/packages/packages.json +++ b/packages/packages.json @@ -12,12 +12,12 @@ "connection/valory/twitter/0.1.0": "bafybeidcalgczvpoq6l2uh7kkfblwxmytm6bicmsxub7pevu2k46sp7mky", "connection/valory/openai/0.1.0": "bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a", "connection/valory/farcaster/0.1.0": "bafybeibbdas7lxbipksodaphjms3uop7vnzjqkroktjq2g6wbvgtlldaxi", - "connection/valory/tweepy/0.1.0": "bafybeidantmqvkut7fpuii45ewdfxb5zy3pwdndposr5hmzwicfzjf4cpa", + "connection/valory/tweepy/0.1.0": "bafybeiaws7qm4ksaeb7fgxw75j5kakjbl2wsivwyn7o2uleett2xxs3eia", "skill/valory/dynamic_nft_abci/0.1.0": "bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by", - "skill/valory/twitter_scoring_abci/0.1.0": "bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi", + "skill/valory/twitter_scoring_abci/0.1.0": "bafybeigyvtr5nhqrlasgy2nxfy5nx7lrmv7tizh55xnewb3z7hlobdc7aq", "skill/valory/ceramic_read_abci/0.1.0": "bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi", "skill/valory/ceramic_write_abci/0.1.0": "bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe", - "skill/valory/impact_evaluator_abci/0.1.0": "bafybeicyha3z6jjsow3heeckem2yrxq652hwc3jh4iotu5ewf65wovnrsm", + "skill/valory/impact_evaluator_abci/0.1.0": "bafybeiahefujrd4usf4bvsa6oo5hv3u5mmh7bbkeyvmg4jb4ha6q3crwnq", "skill/valory/generic_scoring_abci/0.1.0": "bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm", "skill/valory/twitter_write_abci/0.1.0": "bafybeiaofknda4gzs6xks53ta5xwhgs2dvyvfqypdk2ey7if6egjpgarna", "skill/valory/llm_abci/0.1.0": "bafybeicljcawuikys472glztza4mfj7ccl2eqxcbptysl7tdexik6w6zvm", @@ -26,10 +26,10 @@ "skill/valory/mech_interact_abci/0.1.0": "bafybeicj4c6brjtijdqywn3knejwl4o5ynzi4jcc4sinezeyj7lbgxbu5q", "skill/valory/farcaster_write_abci/0.1.0": "bafybeibqepsbwfbrlpp6lqauyb2zlapo5j35omuhcwv4s2cmnfl7ujmcc4", "skill/valory/farcaster_test_abci/0.1.0": "bafybeibgcz7cxdrkkbzwsfn7webtktxh55m63umlb3brh2wx7qwckbmlua", - "agent/valory/impact_evaluator/0.1.0": "bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe", + "agent/valory/impact_evaluator/0.1.0": "bafybeicfzcda2sqqvbxj53jzgyrkyy52xkunc6xzibzub5brbzpt4dkv3i", "agent/valory/farcaster_test/0.1.0": "bafybeifcwix6xwyee5wo37jjfazfshsf3h3srmspple4mruyyqz2dsimf4", - "service/valory/impact_evaluator/0.1.0": "bafybeic6wdlxvrppf2e4xkhstwh4vvitptoarx6avjcppru7cgny6wkfye", - "service/valory/impact_evaluator_local/0.1.0": "bafybeidt4rlis4cnemim4livkjitiy5o72bsiheh2idq4hvdtyu4qsyv5a" + "service/valory/impact_evaluator/0.1.0": "bafybeicdmqmqulnel2x4k7kgr3hmdnhm7gjfjap23unxhxuk3s3e52guzu", + "service/valory/impact_evaluator_local/0.1.0": "bafybeigxwceytf46gxajolitjh3mmpae4sbl3ku5y2kpirtpjxd264tb4i" }, "third_party": { "protocol/open_aea/signing/1.0.0": "bafybeihv62fim3wl2bayavfcg3u5e5cxu3b7brtu4cn5xoxd6lqwachasi", diff --git a/packages/valory/agents/impact_evaluator/aea-config.yaml b/packages/valory/agents/impact_evaluator/aea-config.yaml index 779bab08..206f9b83 100644 --- a/packages/valory/agents/impact_evaluator/aea-config.yaml +++ b/packages/valory/agents/impact_evaluator/aea-config.yaml @@ -43,9 +43,9 @@ protocols: skills: - 
valory/abstract_abci:0.1.0:bafybeibzxex6md6xp5eibvp23cmkk7xgdyd44r5lxhfhlquwe3nb22ujgy - valory/abstract_round_abci:0.1.0:bafybeih2soag6gy472x7vbbxmetmr532jqzm2efcapymi3725as33fvjvu -- valory/impact_evaluator_abci:0.1.0:bafybeicyha3z6jjsow3heeckem2yrxq652hwc3jh4iotu5ewf65wovnrsm +- valory/impact_evaluator_abci:0.1.0:bafybeiahefujrd4usf4bvsa6oo5hv3u5mmh7bbkeyvmg4jb4ha6q3crwnq - valory/generic_scoring_abci:0.1.0:bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm -- valory/twitter_scoring_abci:0.1.0:bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi +- valory/twitter_scoring_abci:0.1.0:bafybeigyvtr5nhqrlasgy2nxfy5nx7lrmv7tizh55xnewb3z7hlobdc7aq - valory/ceramic_read_abci:0.1.0:bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi - valory/ceramic_write_abci:0.1.0:bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe - valory/dynamic_nft_abci:0.1.0:bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by diff --git a/packages/valory/connections/tweepy/connection.py b/packages/valory/connections/tweepy/connection.py index 05f6affd..4c426c72 100644 --- a/packages/valory/connections/tweepy/connection.py +++ b/packages/valory/connections/tweepy/connection.py @@ -45,7 +45,7 @@ import os from collections import deque from pathlib import Path -from typing import Any, Deque, Dict, List, Optional, Tuple, cast +from typing import Any, Dict, List, Optional, Tuple, cast import jsonschema import requests @@ -478,7 +478,7 @@ def process_media(self, **kwargs) -> Optional[List]: # Media hashes is always a list of lists # Each tweet can contain several media items # A thread can contain several tweets - # media_hashes = [[], [hashes_for_tweet_2], [], [hashes_for_tweet_4]] + # media_hashes = [[], [hashes_for_tweet_2], [], [hashes_for_tweet_4]] # noqa: E800 media_hashes = kwargs.get("media_hashes") self.logger.info(f"Processing media: {media_hashes}") thread_media_ids = [] diff --git a/packages/valory/connections/tweepy/connection.yaml b/packages/valory/connections/tweepy/connection.yaml index ce9b6fa4..affe3d48 100644 --- a/packages/valory/connections/tweepy/connection.yaml +++ b/packages/valory/connections/tweepy/connection.yaml @@ -7,7 +7,7 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeicrwqrdownfmeyvvzu45fllxouiwvtynrzs5fhbpt3wndyyhn66eu - connection.py: bafybeieprt2qojuunh72rxd3fm3r3tom7uqxvak3llc7xbmqt65eq6n4ey + connection.py: bafybeievtn5rxaecndbnxmtqvye3pptyrdauj7u7qapbktxyi3nfklevae readme.md: bafybeib5oflnp3gymrottersu6qrnitjmaifl2gvbvjq7kbmsdbihhzfaa fingerprint_ignore_patterns: [] connections: [] @@ -28,5 +28,7 @@ dependencies: version: ==2.28.1 open-aea-cli-ipfs: version: ==1.53.0 + jsonschema: + version: <4.4.0,>=4.3.0 is_abstract: false cert_requests: [] diff --git a/packages/valory/services/impact_evaluator/service.yaml b/packages/valory/services/impact_evaluator/service.yaml index 826a6764..03235a0a 100644 --- a/packages/valory/services/impact_evaluator/service.yaml +++ b/packages/valory/services/impact_evaluator/service.yaml @@ -8,7 +8,7 @@ license: Apache-2.0 fingerprint: README.md: bafybeign56hilwuoa6bgos3uqabss4gew4vadkik7vhj3ucpqw6nxtqtpe fingerprint_ignore_patterns: [] -agent: valory/impact_evaluator:0.1.0:bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe +agent: valory/impact_evaluator:0.1.0:bafybeicfzcda2sqqvbxj53jzgyrkyy52xkunc6xzibzub5brbzpt4dkv3i number_of_agents: 4 deployment: agent: diff --git a/packages/valory/services/impact_evaluator_local/service.yaml 
b/packages/valory/services/impact_evaluator_local/service.yaml index 87913ba9..663074ff 100644 --- a/packages/valory/services/impact_evaluator_local/service.yaml +++ b/packages/valory/services/impact_evaluator_local/service.yaml @@ -8,7 +8,7 @@ license: Apache-2.0 fingerprint: README.md: bafybeign56hilwuoa6bgos3uqabss4gew4vadkik7vhj3ucpqw6nxtqtpe fingerprint_ignore_patterns: [] -agent: valory/impact_evaluator:0.1.0:bafybeicvdmud7tlhabiy5nmc4xnckglwz2bv6lvghayob5xn5ckqnhgohe +agent: valory/impact_evaluator:0.1.0:bafybeicfzcda2sqqvbxj53jzgyrkyy52xkunc6xzibzub5brbzpt4dkv3i number_of_agents: 1 deployment: agent: diff --git a/packages/valory/skills/impact_evaluator_abci/skill.yaml b/packages/valory/skills/impact_evaluator_abci/skill.yaml index 7862ef4a..584ec0a6 100644 --- a/packages/valory/skills/impact_evaluator_abci/skill.yaml +++ b/packages/valory/skills/impact_evaluator_abci/skill.yaml @@ -26,7 +26,7 @@ skills: - valory/abstract_round_abci:0.1.0:bafybeih2soag6gy472x7vbbxmetmr532jqzm2efcapymi3725as33fvjvu - valory/ceramic_read_abci:0.1.0:bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi - valory/generic_scoring_abci:0.1.0:bafybeihn7dp3ck5rhuyar35xllb3f7uyblnz4yfnrpgrmtoyu4kxxsavkm -- valory/twitter_scoring_abci:0.1.0:bafybeicr2uomhzlroks5hohmhltk365crayyzmsgkiezcjn652oj7weqyi +- valory/twitter_scoring_abci:0.1.0:bafybeigyvtr5nhqrlasgy2nxfy5nx7lrmv7tizh55xnewb3z7hlobdc7aq - valory/ceramic_write_abci:0.1.0:bafybeibovogo3fzi3clu2eurlrq7nct5bxej3ktd255ajosk55igq4dlqe - valory/dynamic_nft_abci:0.1.0:bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by - valory/registration_abci:0.1.0:bafybeiaxomjnv2h7ozc7mvmz5wysfinfgtqnniznpcdunijb5iiekv4mzi diff --git a/packages/valory/skills/twitter_scoring_abci/behaviours.py b/packages/valory/skills/twitter_scoring_abci/behaviours.py index 07614dc8..9a4e65ae 100644 --- a/packages/valory/skills/twitter_scoring_abci/behaviours.py +++ b/packages/valory/skills/twitter_scoring_abci/behaviours.py @@ -772,12 +772,11 @@ def get_registration(self, text: str) -> Optional[str]: class TwitterScoringRoundBehaviour(AbstractRoundBehaviour): """TwitterScoringRoundBehaviour""" - initial_behaviour_cls = TwitterMentionsCollectionBehaviour + initial_behaviour_cls = TwitterDecisionMakingBehaviour abci_app_cls = TwitterScoringAbciApp # type: ignore behaviours: Set[Type[BaseBehaviour]] = [ TwitterDecisionMakingBehaviour, - TwitterMentionsCollectionBehaviour, - TwitterHashtagsCollectionBehaviour, + TweetCollectionBehaviour, DBUpdateBehaviour, TwitterRandomnessBehaviour, TwitterSelectKeepersBehaviour, diff --git a/packages/valory/skills/twitter_scoring_abci/skill.yaml b/packages/valory/skills/twitter_scoring_abci/skill.yaml index d41a5122..1d95cd58 100644 --- a/packages/valory/skills/twitter_scoring_abci/skill.yaml +++ b/packages/valory/skills/twitter_scoring_abci/skill.yaml @@ -8,7 +8,7 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeifudgakkjoyahuewp2o4gvqayw7nsgpyxw2ayrpgmzexurh2xomaq - behaviours.py: bafybeib5f265xhosn4flr5vdwrl6rx73nzw4m6iqifqzhhob4enn5kqg4q + behaviours.py: bafybeifbl74uparj6f34zou2tpcxv4tdjstqg5c2ynvigotft43holla7e dialogues.py: bafybeibdqzn37hbo2cq4skww4uh2zvvsjyaxxvdhxisefbdvmjp7rh53si fsm_specification.yaml: bafybeihz4bgnyzntjnjdxgrmc33iimpkuf4wyrf7ly4rs34piozksmlbf4 handlers.py: bafybeid3nqvcyotqj5g5hlgrz57nf7vpjysmgvsxe3p7644f4z5dcwqn6u From f36da236fb81b280c17481f571be566f5ddf292e Mon Sep 17 00:00:00 2001 From: David Vilela Date: Wed, 12 Jun 2024 17:35:05 +0200 Subject: [PATCH 4/4] fix: linters --- 
packages/packages.json | 2 +- .../valory/connections/tweepy/connection.py | 21 ------------------- .../valory/connections/tweepy/connection.yaml | 2 +- 3 files changed, 2 insertions(+), 23 deletions(-) diff --git a/packages/packages.json b/packages/packages.json index acd61239..eda14303 100644 --- a/packages/packages.json +++ b/packages/packages.json @@ -12,7 +12,7 @@ "connection/valory/twitter/0.1.0": "bafybeidcalgczvpoq6l2uh7kkfblwxmytm6bicmsxub7pevu2k46sp7mky", "connection/valory/openai/0.1.0": "bafybeigyehjbahya5mp7vyp5tjvn36rey4btvzskp3ql3mgxp3zu6gwq5a", "connection/valory/farcaster/0.1.0": "bafybeibbdas7lxbipksodaphjms3uop7vnzjqkroktjq2g6wbvgtlldaxi", - "connection/valory/tweepy/0.1.0": "bafybeiaws7qm4ksaeb7fgxw75j5kakjbl2wsivwyn7o2uleett2xxs3eia", + "connection/valory/tweepy/0.1.0": "bafybeiajlogdtu3qqoqqh3pftxob7biopnhsauhmskkom4sqblfozsklzu", "skill/valory/dynamic_nft_abci/0.1.0": "bafybeihxqueloy3vpevun72wgikkrgmsgpnm3khxb7tt43hdol32imt6by", "skill/valory/twitter_scoring_abci/0.1.0": "bafybeigyvtr5nhqrlasgy2nxfy5nx7lrmv7tizh55xnewb3z7hlobdc7aq", "skill/valory/ceramic_read_abci/0.1.0": "bafybeihmqflb7aaofxpmvqfwoipzxbwe7jyefddwxgozjdvocamisrfezi", diff --git a/packages/valory/connections/tweepy/connection.py b/packages/valory/connections/tweepy/connection.py index 4c426c72..f17a2aff 100644 --- a/packages/valory/connections/tweepy/connection.py +++ b/packages/valory/connections/tweepy/connection.py @@ -18,27 +18,6 @@ # # ------------------------------------------------------------------------------ - -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# ------------------------------------------------------------------------------ -# -# Copyright 2021-2024 David Vilela Freire -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# ------------------------------------------------------------------------------ - """Tweepy connection.""" import json diff --git a/packages/valory/connections/tweepy/connection.yaml b/packages/valory/connections/tweepy/connection.yaml index affe3d48..67726431 100644 --- a/packages/valory/connections/tweepy/connection.yaml +++ b/packages/valory/connections/tweepy/connection.yaml @@ -7,7 +7,7 @@ license: Apache-2.0 aea_version: '>=1.0.0, <2.0.0' fingerprint: __init__.py: bafybeicrwqrdownfmeyvvzu45fllxouiwvtynrzs5fhbpt3wndyyhn66eu - connection.py: bafybeievtn5rxaecndbnxmtqvye3pptyrdauj7u7qapbktxyi3nfklevae + connection.py: bafybeida6vznywho622atwu6esrspco32ndv62pk2q2sjpwo2m5iswmtpm readme.md: bafybeib5oflnp3gymrottersu6qrnitjmaifl2gvbvjq7kbmsdbihhzfaa fingerprint_ignore_patterns: [] connections: []