{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "adotBkqZSh5g"
      },
      "outputs": [],
      "source": [
        "!pip install litellm"
      ]
    },
    {
      "cell_type": "code",
      "source": [
        "from litellm import completion\n",
        "\n",
        "## set ENV variables\n",
        "os.environ[\"OPENAI_API_KEY\"] = \"openai key\"\n",
        "os.environ[\"COHERE_API_KEY\"] = \"cohere key\"\n",
        "os.environ[\"REPLICATE_API_KEY\"] = \"replicate key\"\n",
        "messages = [{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}]\n",
        "\n",
        "# openai call\n",
        "response = completion(model=\"gpt-3.5-turbo\", messages=messages)\n",
        "\n",
        "# cohere call\n",
        "response = completion(\"command-nightly\", messages)\n",
        "\n",
        "# replicate call\n",
        "response = completion(\"replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1\", messages)"
      ],
      "metadata": {
        "id": "LeOqznSgSj-z"
      },
      "execution_count": null,
      "outputs": []
    },
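    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "A small follow-up sketch: `completion()` returns responses in the OpenAI format regardless of provider, so the reply from the last call above (the Replicate one, since `response` is reassigned on each call) can be printed directly. The API keys above are placeholders and must be replaced with real keys before running."
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {},
      "outputs": [],
      "source": [
        "# LiteLLM responses follow the OpenAI response schema,\n",
        "# so the reply text from the last call can be read like this:\n",
        "print(response.choices[0].message.content)"
      ]
    }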
  ]
}