Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Corcel request mech tool #253

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .example.env
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
FILE_HASH_TO_TOOLS='[["bafybeibdcttrlgp5udygntka5fofi566pitkxhquke37ng7csvndhy4s2i",["openai-gpt-3.5-turbo-instruct","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiegbsq2ajxyipajac4mmxyvbt22ctwyuypuid6ziavqpndns6fsjy",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeihugaylajwh2fgypxatcw5qrw5qxadtrsi2h2s2246442wlvjirtm",["transfer-native"]],["bafybeiayity7o6c2yynrpe6libyak37hgf4fp7a4kjfcnqkkxwy3zkp3ie",["prediction-offline","prediction-online"]],["bafybeifpalgiya4dyu42hehsqxlwr36fjtxdo75xf3gcpfrauvoxgargy4",["gemini-pro", "gemini-1.0-pro-001", "gemini-1.0-pro-latest", "gemini-1.5-pro-latest"]]]'
API_KEYS='[["openai","dummy_api_key"],["stabilityai","dummy_api_key"],["gemini","dummy_api_key"]]'
FILE_HASH_TO_TOOLS='[["bafybeibdcttrlgp5udygntka5fofi566pitkxhquke37ng7csvndhy4s2i",["openai-gpt-3.5-turbo-instruct","openai-gpt-3.5-turbo","openai-gpt-4"]],["bafybeiegbsq2ajxyipajac4mmxyvbt22ctwyuypuid6ziavqpndns6fsjy",["stabilityai-stable-diffusion-v1-5","stabilityai-stable-diffusion-xl-beta-v2-2-2","stabilityai-stable-diffusion-512-v2-1","stabilityai-stable-diffusion-768-v2-1"]],["bafybeihugaylajwh2fgypxatcw5qrw5qxadtrsi2h2s2246442wlvjirtm",["transfer-native"]],["bafybeiayity7o6c2yynrpe6libyak37hgf4fp7a4kjfcnqkkxwy3zkp3ie",["prediction-offline","prediction-online"]],["bafybeifpalgiya4dyu42hehsqxlwr36fjtxdo75xf3gcpfrauvoxgargy4",["gemini-pro", "gemini-1.0-pro-001", "gemini-1.0-pro-latest", "gemini-1.5-pro-latest"]],["bafybeid7jc3fazl2e4bacs2f3vci6gpmfukdq3rqtkqwcevmgawroycmra",["llama-3-1-8b", "llama-3-1-70b", "gpt-3.5-turbo", "cortext-ultra", "cortext", "cortext-lite", "gpt-4-1106-preview", "gpt-4-turbo-2024-04-09", "gpt-40", "gemini-pro", "davinci-002", "gpt-4-turbo-preview", "gpt-4-0125-preview", "babbage-002", "gpt-4-0613", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-instruct-0914", "gpt-3.5-turbo-instruct", "gpt-3.5-turbo-0301", "gpt-3.5-turbo-0125", "gpt-4-turbo", "gpt-3.5-turbo-0613", "gpt-4o-2024-05-13", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", "gemma-7b-it", "llama3-70b-8192", "llama3-8b-8192", "mixtral-8x7b-32768", "anthropic.claude-3-sonnet-20240229-v1:0", "cohere.command-r-v1:0", "meta.llama2-70b-chat-v1", "amazon.titan-text-express-v1", "mistral.mistral-7b-instruct-v0:2"]]]'
API_KEYS='[["openai","dummy_api_key"],["stabilityai","dummy_api_key"],["gemini","dummy_api_key"],["corcel","dummy_api_key"]]'
ETHEREUM_LEDGER_RPC_0=https://rpc.eu-central-2.gateway.fm/v4/gnosis/non-archival/mainnet
GNOSIS_RPC_0=https://rpc.eu-central-2.gateway.fm/v4/gnosis/non-archival/mainnet
ETHEREUM_WEBSOCKET_RPC_0=wss://rpc.eu-central-2.gateway.fm/ws/v4/gnosis/non-archival/mainnet
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/common_checks.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ jobs:
OPENROUTER_API_KEY: ${{ secrets.OPEN_ROUTER_API_KEY }}
GNOSIS_RPC_URL: ${{ secrets.GNOSIS_RPC_URL }}
GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
CORCEL_API_KEY: ${{ secrets.CORCEL_API_KEY }}
GRAPH_API_KEY: ${{ secrets.GRAPH_API_KEY }}
run: |
printenv
Expand Down
1 change: 1 addition & 0 deletions packages/gnosis/customs/ofv_market_resolver/component.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeie2727utrijcawhgm5bgj7p5lfah2pzepaufnk6m5swuwyduhf2eu
log/factcheck_dev.log: bafybeid5wvhhutaabmf4jlnwiy3ur5lj3xhptrz2mkxrvv3l3jyfzfkbzy
ofv_market_resolver.py: bafybeic6kxngm3mdkpmcjucrrerhyex7kizn7tw7qmpehaiycnc7cb6umu
fingerprint_ignore_patterns: []
entry_point: omen_buy_sell.py
Expand Down
3 changes: 2 additions & 1 deletion packages/packages.json
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,9 @@
"custom/gnosis/omen_tools/0.1.0": "bafybeiglmyy3esctsejdqmz63juvaridbbjwjw3ch4mqudicsrgoir4qrq",
"custom/victorpolisetty/dalle_request/0.1.0": "bafybeieqqtd6gtlry7vheix54nj3ok4cag3uy47yoxlufhi6y3u5i6doti",
"custom/jhehemann/prediction_sentence_embeddings/0.1.0": "bafybeifyyb2wpa77tl7a7fs3fabns45llivhgccbnrpupubojmq2fwe4si",
"custom/gnosis/ofv_market_resolver/0.1.0": "bafybeiemvfq6uxiz3wvdplnxg7wloy6siuggejerlkfkchks6ytgk27uqa",
"custom/gnosis/ofv_market_resolver/0.1.0": "bafybeiab6getmeliadgmuzzlcohplwgvumbk5e2pogckajy5iycxqugosy",
"custom/valory/tee_openai_request/0.1.0": "bafybeictmezaorzxelsy4dztbxh5n2343zio3rk6vo7wc5lptxlobhdnku",
"custom/victorpolisetty/corcel_request/0.1.0": "bafybeid7jc3fazl2e4bacs2f3vci6gpmfukdq3rqtkqwcevmgawroycmra",
"protocol/valory/acn_data_share/0.1.0": "bafybeih5ydonnvrwvy2ygfqgfabkr47s4yw3uqxztmwyfprulwfsoe7ipq",
"protocol/valory/websocket_client/0.1.0": "bafybeifjk254sy65rna2k32kynzenutujwqndap2r222afvr3zezi27mx4",
"contract/valory/agent_mech/0.1.0": "bafybeiah6b5epo2hlvzg5rr2cydgpp2waausoyrpnoarf7oa7bw33rex34",
Expand Down
19 changes: 19 additions & 0 deletions packages/victorpolisetty/customs/corcel_request/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2024 Valory AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
14 changes: 14 additions & 0 deletions packages/victorpolisetty/customs/corcel_request/component.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
name: corcel_request
author: victorpolisetty
version: 0.1.0
type: custom
description: A tool that runs a prompt against the Corcel API.
license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeiaesxnkadyuhsacvtrz774edwal2ygz3hillffbkdi4yq2z44r5hm
corcel_request.py: bafybeiaf52eguz3r5spy4nfoby3nthyxpnrm2p6jnus6u7gthr63izyioa
fingerprint_ignore_patterns: []
entry_point: corcel_request.py
callable: run
dependencies: {}
146 changes: 146 additions & 0 deletions packages/victorpolisetty/customs/corcel_request/corcel_request.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2023-2024 Valory AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Contains the job definitions"""

from typing import Any, Dict, Optional, Tuple

import requests
import json

# Default sampling parameters used when the caller does not override them.
DEFAULT_CORCEL_SETTINGS = {
    "temperature": 0.1,
    "max_tokens": 500,
}

# Corcel chat-completions endpoint (OpenAI-compatible request/response shape).
CORCEL_URL = "https://api.corcel.io/v1/chat/completions"

# Model identifiers accepted by the Corcel chat endpoint, keyed by task type.
# NOTE(review): "gpt-40" looks like a typo for "gpt-4o" — confirm against
# Corcel's published model list before relying on it.
ENGINES = {
    "chat": [
        "llama-3-1-8b",
        "llama-3-1-70b",
        "gpt-3.5-turbo",
        "cortext-ultra",
        "cortext",
        "cortext-lite",
        "gpt-4-1106-preview",
        "gpt-4-turbo-2024-04-09",
        "gpt-40",
        "gemini-pro",
        "davinci-002",
        "gpt-4-turbo-preview",
        "gpt-4-0125-preview",
        "babbage-002",
        "gpt-4-0613",
        "gpt-3.5-turbo-16k",
        "gpt-3.5-turbo-1106",
        "gpt-3.5-turbo-instruct-0914",
        "gpt-3.5-turbo-instruct",
        "gpt-3.5-turbo-0301",
        "gpt-3.5-turbo-0125",
        "gpt-4-turbo",
        "gpt-3.5-turbo-0613",
        "gpt-4o-2024-05-13",
        "claude-3-opus-20240229",
        "claude-3-sonnet-20240229",
        "claude-3-haiku-20240307",
        "gemma-7b-it",
        "llama3-70b-8192",
        "llama3-8b-8192",
        "mixtral-8x7b-32768",
        "anthropic.claude-3-sonnet-20240229-v1:0",
        "cohere.command-r-v1:0",
        "meta.llama2-70b-chat-v1",
        "amazon.titan-text-express-v1",
        "mistral.mistral-7b-instruct-v0:2"
    ]
}

# Every chat engine is a valid "tool" for this component.
# (plain list() copy — the original identity comprehension was redundant)
ALLOWED_TOOLS = list(ENGINES["chat"])


def error_response(msg: str) -> Tuple[str, None, None, None]:
    """Build the four-tuple mech response used to report a failure.

    :param msg: human-readable description of what went wrong.
    :return: ``(msg, None, None, None)``, matching the shape returned by ``run``.
    """
    return (msg, None, None, None)


def run(**kwargs) -> Tuple[Optional[str], Optional[Dict[str, Any]], Any, Any]:
    """Run a prompt against the Corcel chat-completions API.

    Expected ``kwargs``:
        api_keys (dict): must contain a ``"corcel"`` entry with the API key.
        tool (str): model name; must be one of ``ALLOWED_TOOLS``.
        prompt (str): the user prompt to send.
        temperature (float, optional): sampling temperature
            (default ``DEFAULT_CORCEL_SETTINGS["temperature"]``).
        max_tokens (int, optional): completion token limit
            (default ``DEFAULT_CORCEL_SETTINGS["max_tokens"]``).
        counter_callback (optional): opaque object passed back unchanged.

    :return: ``(response_text, prompt, None, counter_callback)`` on success,
        or ``(error_message, None, None, None)`` on any failure.
    """
    api_key = kwargs["api_keys"]["corcel"]
    tool = kwargs["tool"]
    prompt = kwargs["prompt"]

    if api_key is None:
        return error_response("Corcel API key is not available.")

    if tool is None:
        return error_response("No tool has been specified.")

    if tool not in ALLOWED_TOOLS:
        # use error_response for consistency with the other validation branches
        return error_response(f"Model {tool} is not in the list of supported models.")

    if prompt is None:
        return error_response("No prompt has been given.")

    temperature = kwargs.get("temperature", DEFAULT_CORCEL_SETTINGS["temperature"])
    max_tokens = kwargs.get("max_tokens", DEFAULT_CORCEL_SETTINGS["max_tokens"])
    counter_callback = kwargs.get("counter_callback", None)

    try:
        payload = {
            "model": tool,
            "temperature": temperature,
            "max_tokens": max_tokens,
            # Explicitly request streaming: the parsing loop below consumes
            # server-sent-event "data: ..." chunks, which the API only emits
            # in streaming mode.
            "stream": True,
            "messages": [
                {
                    "role": "user",
                    "content": prompt
                }
            ]
        }
        headers = {
            "accept": "application/json",
            "content-type": "application/json",
            # NOTE(review): Corcel appears to take the raw key (no "Bearer "
            # prefix) — confirm against the Corcel API reference.
            "Authorization": api_key,
        }

        # stream=True lets iter_lines yield chunks as they arrive; the timeout
        # prevents the mech from hanging forever on a stalled connection.
        response = requests.post(
            CORCEL_URL, json=payload, headers=headers, stream=True, timeout=300
        )
        # Surface HTTP errors (4xx/5xx) instead of silently returning "".
        response.raise_for_status()

        # Collect the chunks and concatenate the final response.
        full_response = ""
        for line in response.iter_lines():
            if not line:
                continue
            # Each SSE line looks like: data: {"choices":[{"delta":{...}}]}
            try:
                data = json.loads(line.decode("utf-8").replace("data: ", ""))
            except json.JSONDecodeError:
                # e.g. the terminal "data: [DONE]" sentinel — skip it.
                continue
            if "choices" in data and len(data["choices"]) > 0:
                choice = data["choices"][0]
                # Streaming chunks carry "delta"; a non-streamed response
                # carries "message" instead — accept either.
                chunk = (
                    choice.get("delta", {}).get("content")
                    or choice.get("message", {}).get("content")
                    or ""
                )
                full_response += chunk

    except Exception as e:
        return f"An error occurred: {str(e)}", None, None, None

    return full_response, prompt, None, counter_callback
1 change: 1 addition & 0 deletions tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,4 @@
GNOSIS_RPC_URL = os.getenv("GNOSIS_RPC_URL")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
SERPER_API_KEY = os.getenv("SERPER_API_KEY")
CORCEL_API_KEY = os.getenv("CORCEL_API_KEY")
5 changes: 4 additions & 1 deletion tests/test_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,9 @@
NEWS_API_KEY,
OPENROUTER_API_KEY,
GNOSIS_RPC_URL,
GEMINI_API_KEY, SERPER_API_KEY
GEMINI_API_KEY,
SERPER_API_KEY,
CORCEL_API_KEY,
)


Expand All @@ -64,6 +66,7 @@ class BaseToolTest:
"gnosis_rpc_url": [GNOSIS_RPC_URL],
"gemini": [GEMINI_API_KEY],
"serperapi": [SERPER_API_KEY],
"corcel": [CORCEL_API_KEY],
}
)
models: List = [None]
Expand Down
Loading