Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Feat/keychain #229

Merged
merged 10 commits into from
May 24, 2024
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeibbn67pnrrm4qm3n3kbelvbs3v7fjlrjniywmw2vbizarippidtvi
prediction_sum_url_content.py: bafybeiaw7kjpgyl6nqtsl4vuzalf3lnhwpuz4phhqqw3ohi4eul2h3seqm
prediction_sum_url_content.py: bafybeieywowx265yycgf5735bw4zyabfy6ivwnntl6smxa2hicktipgeby
fingerprint_ignore_patterns: []
entry_point: prediction_sum_url_content.py
callable: run
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,16 @@
# ------------------------------------------------------------------------------

"""This module implements a Mech tool for binary predictions."""

from typing import Any, Dict, Generator, List, Optional, Tuple
import functools
from typing import Any, Dict, Generator, List, Optional, Tuple, Callable
from datetime import datetime, timezone
import json
import re
from concurrent.futures import Future, ThreadPoolExecutor

import anthropic
import googleapiclient
import openai
from bs4 import BeautifulSoup, NavigableString
from googleapiclient.discovery import build
from openai import OpenAI
Expand All @@ -43,6 +46,58 @@

client: Optional[OpenAI] = None

MechResponse = Tuple[str, Optional[str], Optional[Dict[str, Any]], Any, Any]


def with_key_rotation(func: Callable):
    """Decorator that retries *func* with rotated API keys on rate-limit errors.

    The wrapped callable must receive a KeyChain-like object via the
    ``api_keys`` keyword argument, exposing ``max_retries() -> Dict[str, int]``
    and ``rotate(service: str) -> None`` — TODO confirm against the KeyChain
    implementation elsewhere in the repository.

    On success, the wrapped function's result tuple is extended with the
    (possibly rotated) ``api_keys`` object. Non-rate-limit exceptions are
    converted into an error response tuple instead of being propagated.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> MechResponse:
        # this is expected to be a KeyChain object,
        # although it is not explicitly typed as such
        api_keys = kwargs["api_keys"]
        retries_left: Dict[str, int] = api_keys.max_retries()

        def execute() -> MechResponse:
            """Retry the function with a new key."""
            try:
                result = func(*args, **kwargs)
                return result + (api_keys, )
            except anthropic.RateLimitError as e:
                # try with a new key again
                service = "anthropic"
                if retries_left[service] <= 0:
                    raise e
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except openai.RateLimitError as e:
                # OpenAI and OpenRouter keys are rotated together here.
                if retries_left["openai"] <= 0 and retries_left["openrouter"] <= 0:
                    raise e
                retries_left["openai"] -= 1
                retries_left["openrouter"] -= 1
                api_keys.rotate("openai")
                api_keys.rotate("openrouter")
                return execute()
            except googleapiclient.errors.HttpError as e:
                # Only rotate on HTTP 429 (rate limit); re-raise anything else.
                rate_limit_exceeded_code = 429
                if e.status_code != rate_limit_exceeded_code:
                    raise e
                service = "google_api_key"
                if retries_left[service] <= 0:
                    raise e
                # BUGFIX: decrement the retry budget. Previously this branch
                # never decremented, so Google retries were unbounded.
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except Exception as e:
                # Best-effort: surface unexpected failures as an error tuple.
                return str(e), "", None, None, api_keys

        return execute()

    return wrapper



class OpenAIClientManager:
"""Client context manager for OpenAI."""
Expand Down Expand Up @@ -1056,6 +1111,7 @@ def fetch_additional_information(
return additional_informations


@with_key_rotation
def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]:
"""
Run the task with the given arguments.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeibt7f7crtwvmkg7spy3jhscmlqltvyblzp32g6gj44v7tlo5lycuq
prediction_request_rag.py: bafybeicllugnruskdj7ipmrj2vrtlxmjpqtwlk4c3cfjttfzuvkeldp3m4
prediction_request_rag.py: bafybeicz56wjrxavm4iwkuh7x7wbk5eiviavnikyf334u7z7cmr3mrhe4i
fingerprint_ignore_patterns: []
entry_point: prediction_request_rag.py
callable: run
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@
# limitations under the License.
#
# ------------------------------------------------------------------------------

import functools
import re
import json

import anthropic
import faiss
import PyPDF2
import googleapiclient
import openai
import requests
import numpy as np
from io import BytesIO
Expand All @@ -36,6 +40,61 @@
from tiktoken import encoding_for_model




MechResponse = Tuple[str, Optional[str], Optional[Dict[str, Any]], Any, Any]


def with_key_rotation(func: Callable):
    """Decorator that retries *func* with rotated API keys on rate-limit errors.

    The wrapped callable must receive a KeyChain-like object via the
    ``api_keys`` keyword argument, exposing ``max_retries() -> Dict[str, int]``
    and ``rotate(service: str) -> None`` — TODO confirm against the KeyChain
    implementation elsewhere in the repository.

    On success, the wrapped function's result tuple is extended with the
    (possibly rotated) ``api_keys`` object. Non-rate-limit exceptions are
    converted into an error response tuple instead of being propagated.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> MechResponse:
        # this is expected to be a KeyChain object,
        # although it is not explicitly typed as such
        api_keys = kwargs["api_keys"]
        retries_left: Dict[str, int] = api_keys.max_retries()

        def execute() -> MechResponse:
            """Retry the function with a new key."""
            try:
                result = func(*args, **kwargs)
                return result + (api_keys, )
            except anthropic.RateLimitError as e:
                # try with a new key again
                service = "anthropic"
                if retries_left[service] <= 0:
                    raise e
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except openai.RateLimitError as e:
                # OpenAI and OpenRouter keys are rotated together here.
                if retries_left["openai"] <= 0 and retries_left["openrouter"] <= 0:
                    raise e
                retries_left["openai"] -= 1
                retries_left["openrouter"] -= 1
                api_keys.rotate("openai")
                api_keys.rotate("openrouter")
                return execute()
            except googleapiclient.errors.HttpError as e:
                # Only rotate on HTTP 429 (rate limit); re-raise anything else.
                rate_limit_exceeded_code = 429
                if e.status_code != rate_limit_exceeded_code:
                    raise e
                service = "google_api_key"
                if retries_left[service] <= 0:
                    raise e
                # BUGFIX: decrement the retry budget. Previously this branch
                # never decremented, so Google retries were unbounded.
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except Exception as e:
                # Best-effort: surface unexpected failures as an error tuple.
                return str(e), "", None, None, api_keys

        return execute()

    return wrapper



class LLMClientManager:
"""Client context manager for LLMs."""

Expand Down Expand Up @@ -658,6 +717,7 @@ def parser_prediction_response(response: str) -> str:
if "p_yes" not in response:
print("Not a valid answer from the model")
print(f"response = {response}")
results = json.dumps(results)
return results

for key in ["p_yes", "p_no", "info_utility", "confidence"]:
Expand All @@ -674,6 +734,7 @@ def parser_prediction_response(response: str) -> str:
return results


@with_key_rotation
def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]:
"""Run the task"""
tool = kwargs["tool"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeiekjzoy2haayvkiwhb2u2epflpqxticud34mma3gdhfzgu36lxwiq
prediction_request_rag_cohere.py: bafybeib4jviue2jqktqbxca4gtzxrvvxi5oihhsbvarymiqyp3xkee7soi
prediction_request_rag_cohere.py: bafybeigusvetxjp37yzrpracrmtfai4lhcsktlzy5yimjissw7cgxhr6rm
fingerprint_ignore_patterns: []
entry_point: prediction_request_rag_cohere.py
callable: run
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@
# limitations under the License.
#
# ------------------------------------------------------------------------------

import functools
import re
import json

import anthropic
import faiss
import PyPDF2
import googleapiclient
import openai
import requests
import numpy as np
from io import BytesIO
Expand All @@ -36,6 +40,61 @@
from tiktoken import encoding_for_model




MechResponse = Tuple[str, Optional[str], Optional[Dict[str, Any]], Any, Any]


def with_key_rotation(func: Callable):
    """Decorator that retries *func* with rotated API keys on rate-limit errors.

    The wrapped callable must receive a KeyChain-like object via the
    ``api_keys`` keyword argument, exposing ``max_retries() -> Dict[str, int]``
    and ``rotate(service: str) -> None`` — TODO confirm against the KeyChain
    implementation elsewhere in the repository.

    On success, the wrapped function's result tuple is extended with the
    (possibly rotated) ``api_keys`` object. Non-rate-limit exceptions are
    converted into an error response tuple instead of being propagated.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> MechResponse:
        # this is expected to be a KeyChain object,
        # although it is not explicitly typed as such
        api_keys = kwargs["api_keys"]
        retries_left: Dict[str, int] = api_keys.max_retries()

        def execute() -> MechResponse:
            """Retry the function with a new key."""
            try:
                result = func(*args, **kwargs)
                return result + (api_keys, )
            except anthropic.RateLimitError as e:
                # try with a new key again
                service = "anthropic"
                if retries_left[service] <= 0:
                    raise e
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except openai.RateLimitError as e:
                # OpenAI and OpenRouter keys are rotated together here.
                if retries_left["openai"] <= 0 and retries_left["openrouter"] <= 0:
                    raise e
                retries_left["openai"] -= 1
                retries_left["openrouter"] -= 1
                api_keys.rotate("openai")
                api_keys.rotate("openrouter")
                return execute()
            except googleapiclient.errors.HttpError as e:
                # Only rotate on HTTP 429 (rate limit); re-raise anything else.
                rate_limit_exceeded_code = 429
                if e.status_code != rate_limit_exceeded_code:
                    raise e
                service = "google_api_key"
                if retries_left[service] <= 0:
                    raise e
                # BUGFIX: decrement the retry budget. Previously this branch
                # never decremented, so Google retries were unbounded.
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except Exception as e:
                # Best-effort: surface unexpected failures as an error tuple.
                return str(e), "", None, None, api_keys

        return execute()

    return wrapper



class LLMClientManager:
"""Client context manager for LLMs."""

Expand Down Expand Up @@ -645,6 +704,7 @@ def parser_prediction_response(response: str) -> str:
raise ValueError(f"Error parsing the response of the model {response}")


@with_key_rotation
def run(**kwargs) -> Tuple[Optional[str], Any, Optional[Dict[str, Any]], Any]:
"""Run the task"""
model = kwargs.get("model")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ license: Apache-2.0
aea_version: '>=1.0.0, <2.0.0'
fingerprint:
__init__.py: bafybeib36ew6vbztldut5xayk5553rylrq7yv4cpqyhwc5ktvd4cx67vwu
prediction_request_reasoning.py: bafybeidb43nygtvbhimnsd223ddpoii46dwirb5znmp2g473u4jii36jqa
prediction_request_reasoning.py: bafybeiggh5wuwqr6ggoeug6vng7uzuzjup5n46vrryrkxczpqeioadajhm
fingerprint_ignore_patterns: []
entry_point: prediction_request_reasoning.py
callable: run
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,15 @@
# ------------------------------------------------------------------------------

"""This module implements a Mech tool for binary predictions."""

import functools
import re
import json

import anthropic
import faiss
import PyPDF2
import googleapiclient
import openai
import requests
import time
import numpy as np
Expand All @@ -39,6 +43,62 @@
from typing import Any, Dict, Generator, List, Optional, Tuple, Callable, Union




MechResponse = Tuple[str, Optional[str], Optional[Dict[str, Any]], Any, Any]


def with_key_rotation(func: Callable):
    """Decorator that retries *func* with rotated API keys on rate-limit errors.

    The wrapped callable must receive a KeyChain-like object via the
    ``api_keys`` keyword argument, exposing ``max_retries() -> Dict[str, int]``
    and ``rotate(service: str) -> None`` — TODO confirm against the KeyChain
    implementation elsewhere in the repository.

    On success, the wrapped function's result tuple is extended with the
    (possibly rotated) ``api_keys`` object. Non-rate-limit exceptions are
    converted into an error response tuple instead of being propagated.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> MechResponse:
        # this is expected to be a KeyChain object,
        # although it is not explicitly typed as such
        api_keys = kwargs["api_keys"]
        retries_left: Dict[str, int] = api_keys.max_retries()

        def execute() -> MechResponse:
            """Retry the function with a new key."""
            try:
                result = func(*args, **kwargs)
                return result + (api_keys, )
            except anthropic.RateLimitError as e:
                # try with a new key again
                service = "anthropic"
                if retries_left[service] <= 0:
                    raise e
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except openai.RateLimitError as e:
                # OpenAI and OpenRouter keys are rotated together here.
                if retries_left["openai"] <= 0 and retries_left["openrouter"] <= 0:
                    raise e
                retries_left["openai"] -= 1
                retries_left["openrouter"] -= 1
                api_keys.rotate("openai")
                api_keys.rotate("openrouter")
                return execute()
            except googleapiclient.errors.HttpError as e:
                # Only rotate on HTTP 429 (rate limit); re-raise anything else.
                rate_limit_exceeded_code = 429
                if e.status_code != rate_limit_exceeded_code:
                    raise e
                service = "google_api_key"
                if retries_left[service] <= 0:
                    raise e
                # BUGFIX: decrement the retry budget. Previously this branch
                # never decremented, so Google retries were unbounded.
                retries_left[service] -= 1
                api_keys.rotate(service)
                return execute()
            except Exception as e:
                # Best-effort: surface unexpected failures as an error tuple.
                return str(e), "", None, None, api_keys

        return execute()

    return wrapper




class LLMClientManager:
"""Client context manager for LLMs."""

Expand Down Expand Up @@ -840,6 +900,7 @@ def extract_question(prompt: str) -> str:
return question


@with_key_rotation
def run(**kwargs) -> Tuple[str, Optional[str], Optional[Dict[str, Any]], Any]:
"""Run the task"""
tool = kwargs["tool"]
Expand Down
Loading
Loading