Add litellm Demo - call cohere models using chatGPT I/O format #70

Open · wants to merge 1 commit into base: main
163 changes: 163 additions & 0 deletions notebooks/liteLLM_Demo.ipynb
@@ -0,0 +1,163 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "iocfcLrsvB0u",
"outputId": "1bb21c93-a8df-4377-bd11-6e76a663557b"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Requirement already satisfied: litellm in /usr/local/lib/python3.10/dist-packages (0.1.1)\n",
"Requirement already satisfied: openai in /usr/local/lib/python3.10/dist-packages (from litellm) (0.27.8)\n",
"Requirement already satisfied: cohere in /usr/local/lib/python3.10/dist-packages (from litellm) (4.18.0)\n",
"Requirement already satisfied: aiohttp<4.0,>=3.0 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (3.8.5)\n",
"Requirement already satisfied: backoff<3.0,>=2.0 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (2.2.1)\n",
"Requirement already satisfied: fastavro==1.7.4 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (1.7.4)\n",
"Requirement already satisfied: importlib_metadata<7.0,>=6.0 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (6.8.0)\n",
"Requirement already satisfied: requests<3.0.0,>=2.25.0 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (2.27.1)\n",
"Requirement already satisfied: urllib3<3,>=1.26 in /usr/local/lib/python3.10/dist-packages (from cohere->litellm) (1.26.16)\n",
"Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from openai->litellm) (4.65.0)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (23.1.0)\n",
"Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (2.0.12)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (6.0.4)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (4.0.2)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (1.9.2)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (1.4.0)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0,>=3.0->cohere->litellm) (1.3.1)\n",
"Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.10/dist-packages (from importlib_metadata<7.0,>=6.0->cohere->litellm) (3.16.2)\n",
"Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.25.0->cohere->litellm) (2023.7.22)\n",
"Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.25.0->cohere->litellm) (3.4)\n"
]
}
],
"source": [
"# @title Install litellm\n",
"!pip install litellm"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "iw4ahUdVvK1E"
},
"outputs": [],
"source": [
"# @title Setup Keys, Imports\n",
"import openai\n",
"from litellm import completion\n",
"import os\n",
"\n",
"os.environ[\"OPENAI_API_KEY\"] = \"\"# @param\n",
"os.environ[\"COHERE_API_KEY\"] = \"\" # @param\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "sefF9mAmwxz2",
"outputId": "f027add3-0f25-4952-b088-6710aeb6dfb7"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"OpenAI call\n",
"{\n",
" \"id\": \"chatcmpl-7gnaeTRBVsFrLYym0oZbY0i0qEHtv\",\n",
" \"object\": \"chat.completion\",\n",
" \"created\": 1690434756,\n",
" \"model\": \"gpt-3.5-turbo-0613\",\n",
" \"choices\": [\n",
" {\n",
" \"index\": 0,\n",
" \"message\": {\n",
" \"role\": \"assistant\",\n",
" \"content\": \"Hello! I'm an AI language model, so I don't have feelings, but I'm here to help you. How may I assist you today?\"\n",
" },\n",
" \"finish_reason\": \"stop\"\n",
" }\n",
" ],\n",
" \"usage\": {\n",
" \"prompt_tokens\": 13,\n",
" \"completion_tokens\": 31,\n",
" \"total_tokens\": 44\n",
" }\n",
"}\n"
]
}
],
"source": [
"# @title OpenAI completion call\n",
"messages = [{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}]\n",
"# openai call\n",
"response = completion(model=\"gpt-3.5-turbo\", messages=messages)\n",
"print(\"\\nOpenAI call\")\n",
"print(response)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "sM6G-QUywIjP",
"outputId": "7218f504-f955-4072-c526-423dba00cc60"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Cohere call\n",
"{'choices': [{'finish_reason': 'stop', 'index': 0, 'message': {'content': cohere.Generation {\n",
"\tid: bc2f04d9-dd49-47df-98f5-465f3e1f7465\n",
"\tprompt: Hello, how are you?\n",
"\ttext: I'm doing well, thank you. I'm a large language model created by the team at Co\n",
"\tlikelihood: None\n",
"\tfinish_reason: None\n",
"\ttoken_likelihoods: None\n",
"}, 'role': 'assistant'}}]}\n"
]
}
],
"source": [
"# @title Cohere completion call\n",
"response = completion(\"command-nightly\", messages)\n",
"print(\"\\nCohere call\")\n",
"print(response)\n"
]
}
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
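
For quick reference outside the notebook, the pattern the demo relies on is litellm's single completion() function taking ChatGPT-style messages for both providers, so switching from OpenAI to Cohere only changes the model name. A minimal sketch of the same calls (assuming OPENAI_API_KEY and COHERE_API_KEY are already set in the environment, as the notebook does):

# Minimal sketch of the unified litellm interface demonstrated in the notebook.
# Assumes OPENAI_API_KEY and COHERE_API_KEY are exported in the environment.
from litellm import completion

messages = [{"role": "user", "content": "Hello, how are you?"}]

# OpenAI-backed call
openai_response = completion(model="gpt-3.5-turbo", messages=messages)
print(openai_response)

# Cohere-backed call: same ChatGPT I/O format, different model name
cohere_response = completion(model="command-nightly", messages=messages)
print(cohere_response)

Both calls return a chat-completion-style response with a choices list, as shown in the notebook's cell outputs above.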