diff --git a/.example.env b/.example.env
index b084fd96..2901184c 100644
--- a/.example.env
+++ b/.example.env
@@ -1,4 +1,4 @@
-FILE_HASH_TO_TOOLS='[["bafybeiepacjq6yjukta4gfiq2fyvsj4b4r2g7swtkb2qfjugbbtsjt47ya",["openai-text-davinci-002","openai-text-davinci-003","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiepc5v4ixwuu5m6p5stck5kf2ecgkydf6crj52i5umnl2qm5swb4i",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeidpbnqbruzqlq424qt3i5dcvyqmcimshjilftabnrroujmjhdmteu",["transfer-native"]],["bafybeicf53hgwzvk74sso4ujdr5vzxorklxld4fvcn33dlf2pwhxz4w6rm",["prediction-offline","prediction-online"]]]'
+FILE_HASH_TO_TOOLS='[["bafybeiepacjq6yjukta4gfiq2fyvsj4b4r2g7swtkb2qfjugbbtsjt47ya",["openai-text-davinci-002","openai-text-davinci-003","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiepc5v4ixwuu5m6p5stck5kf2ecgkydf6crj52i5umnl2qm5swb4i",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeidpbnqbruzqlq424qt3i5dcvyqmcimshjilftabnrroujmjhdmteu",["transfer-native"]],["bafybeiglhy5epaytvt5qqdx77ld23ekouli53qrf2hjyebd5xghlunidfi",["prediction-offline","prediction-online"]]]'
 API_KEYS='[["openai","dummy_api_key"],["stabilityai","dummy_api_key"]]'
 ETHEREUM_LEDGER_RPC_0=https://rpc.eu-central-2.gateway.fm/v4/gnosis/non-archival/mainnet
 ETHEREUM_WEBSOCKET_RPC_0=wss://rpc.eu-central-2.gateway.fm/ws/v4/gnosis/non-archival/mainnet
diff --git a/packages/packages.json b/packages/packages.json
index 91bd685f..9d3e76fa 100644
--- a/packages/packages.json
+++ b/packages/packages.json
@@ -2,12 +2,12 @@
     "dev": {
         "connection/valory/websocket_client/0.1.0": "bafybeicz53kzs5uvyiod2azntl76zwgmpgr22ven4wl5fnwt2m546j3wsu",
         "skill/valory/contract_subscription/0.1.0": "bafybeif3hkpgbzuoxsbqxnd752qkvk3onytltrufnyrphnqbi62si4mdhy",
-        "agent/valory/mech/0.1.0": "bafybeihmzkhamfw7u3nkc6xf6ummstgvcxpcpl6ofcsc62kwwb5cqhw5ay",
+        "agent/valory/mech/0.1.0": "bafybeifepf5hooublgkygudt3jnss2lqzm2hbcp4v64e4pq2j2q32jy2uu",
         "skill/valory/multiplexer_abci/0.1.0": "bafybeiabjge5xdlvdox4rauhb255v2b3idfzpbhgrfl46njnbcuisr5c6i",
         "skill/valory/task_execution_abci/0.1.0": "bafybeicqan3ycxjlm7qfnuie6ektn7cwsxawuuqam24sljtkemfva2isou",
         "skill/valory/mech_abci/0.1.0": "bafybeigeroajs3xaxzia3s2ytnsquehoymqiap6luzwrngsucao7emtotm",
         "contract/valory/agent_mech/0.1.0": "bafybeiasgzwum4kypi5d4ubldvj7ihg6tyvpnvt5gyqbx3jdsku7b3gxgy",
-        "service/valory/mech/0.1.0": "bafybeihnasayen5wolcxd4b6asp3mcl4kni3hodg3axiqoyc24ho3tbj5a",
+        "service/valory/mech/0.1.0": "bafybeifnjsj2qomu3n6mo36xahnd6lrdo3wlq3uzdosx4e5ve3y4hpiiuy",
         "protocol/valory/acn_data_share/0.1.0": "bafybeieyixetwvz767zekhvg7r6etumyanzys6xbalx2brrfswybinnlhi",
         "protocol/valory/default/1.0.0": "bafybeiecmut3235aen7wxukllv424f3dysvvlgfmn562kzdunc5hdj3hxu"
     },
diff --git a/packages/valory/agents/mech/aea-config.yaml b/packages/valory/agents/mech/aea-config.yaml
index db6c9311..332accce 100644
--- a/packages/valory/agents/mech/aea-config.yaml
+++ b/packages/valory/agents/mech/aea-config.yaml
@@ -148,7 +148,7 @@ models:
       setup:
        all_participants: ${list:["0x10E867Ac2Fb0Aa156ca81eF440a5cdf373bE1AaC"]}
        safe_contract_address: ${str:0x5e1D1eb61E1164D5a50b28C575dA73A29595dFf7}
-      file_hash_to_tools_json: ${list:[["bafybeibi34bhbvesmvd6o24jxvuldrwen4wj62na3lhva7k4afkg2shinu",["openai-text-davinci-002","openai-text-davinci-003","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiafdm3jctiz6wwo3rmo3vdubk7j7l5tumoxi5n5rc3x452mtkgyua",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeidpbnqbruzqlq424qt3i5dcvyqmcimshjilftabnrroujmjhdmteu",["transfer-native"]],["bafybeicf53hgwzvk74sso4ujdr5vzxorklxld4fvcn33dlf2pwhxz4w6rm",["prediction-online","prediction-offline"]]]}
+      file_hash_to_tools_json: ${list:[["bafybeibi34bhbvesmvd6o24jxvuldrwen4wj62na3lhva7k4afkg2shinu",["openai-text-davinci-002","openai-text-davinci-003","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiafdm3jctiz6wwo3rmo3vdubk7j7l5tumoxi5n5rc3x452mtkgyua",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeidpbnqbruzqlq424qt3i5dcvyqmcimshjilftabnrroujmjhdmteu",["transfer-native"]],["bafybeiglhy5epaytvt5qqdx77ld23ekouli53qrf2hjyebd5xghlunidfi",["prediction-online","prediction-offline"]]]}
       api_keys_json: ${list:[["openai", "dummy_api_key"],["stabilityai", "dummy_api_key"],["google_api_key", "dummy_api_key"],["google_engine_id", "dummy_api_key"]]}
       use_polling: ${bool:false}
diff --git a/packages/valory/services/mech/service.yaml b/packages/valory/services/mech/service.yaml
index f0ee226d..ace4c7e6 100644
--- a/packages/valory/services/mech/service.yaml
+++ b/packages/valory/services/mech/service.yaml
@@ -7,7 +7,7 @@ license: Apache-2.0
 fingerprint:
   README.md: bafybeif7ia4jdlazy6745ke2k2x5yoqlwsgwr6sbztbgqtwvs3ndm2p7ba
 fingerprint_ignore_patterns: []
-agent: valory/mech:0.1.0:bafybeihmzkhamfw7u3nkc6xf6ummstgvcxpcpl6ofcsc62kwwb5cqhw5ay
+agent: valory/mech:0.1.0:bafybeifepf5hooublgkygudt3jnss2lqzm2hbcp4v64e4pq2j2q32jy2uu
 number_of_agents: 4
 deployment:
   agent:
diff --git a/tools/prediction_request.py b/tools/prediction_request.py
index 402e2a45..9197f8a4 100644
--- a/tools/prediction_request.py
+++ b/tools/prediction_request.py
@@ -134,7 +134,7 @@ def get_urls_from_queries(queries: List[str], api_key: str, engine: str) -> List
             query=query,
             api_key=api_key,
             engine=engine,
-            num=3, # Number of returned results
+            num=3,  # Number of returned results
         ):
             results.append(url)
     unique_results = list(set(results))
@@ -205,7 +205,8 @@ def fetch_additional_information(
         temperature=temperature,
         max_tokens=max_tokens,
         n=1,
-        timeout=120,
+        timeout=90,
+        request_timeout=90,
         stop=None,
     )
     json_data = json.loads(response.choices[0].message.content)
@@ -258,7 +259,8 @@ def run(**kwargs) -> Tuple[str, Optional[Dict[str, Any]]]:
         temperature=temperature,
         max_tokens=max_tokens,
         n=1,
-        timeout=120,
+        timeout=150,
+        request_timeout=150,
         stop=None,
     )
     return response.choices[0].message.content, None
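For reference, below is a minimal sketch of the kind of call site the `tools/prediction_request.py` hunks modify, assuming the legacy pre-1.0 `openai` Python client (where `request_timeout` is the per-request HTTP timeout and `timeout` is interpreted by the client as an overall retry deadline). The function name `sample_completion` and the default values for `engine`, `temperature`, and `max_tokens` are illustrative assumptions, not taken from the repository.

```python
# Sketch only, not the actual file: reconstructed ChatCompletion call with both
# timeout knobs set, assuming openai<1.0 as used by tools/prediction_request.py.
from typing import Optional

import openai  # openai<1.0 assumed


def sample_completion(
    prompt: str,
    engine: str = "gpt-3.5-turbo",   # illustrative default
    temperature: float = 0.7,        # illustrative default
    max_tokens: int = 500,           # illustrative default
    api_key: Optional[str] = None,
) -> str:
    """Call the chat completion endpoint with client-side timeouts configured."""
    if api_key is not None:
        openai.api_key = api_key
    response = openai.ChatCompletion.create(
        model=engine,
        messages=[{"role": "user", "content": prompt}],
        temperature=temperature,
        max_tokens=max_tokens,
        n=1,
        timeout=150,          # overall deadline honored by the legacy client's retry loop
        request_timeout=150,  # per-request HTTP timeout (the parameter the patch adds)
        stop=None,
    )
    return response.choices[0].message.content
```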