import time
import openai

# Set your API key before use, e.g. openai.api_key = os.environ["OPENAI_API_KEY"]


# Sentence generator (decoder) for GPT-3 and GPT-3.5 models.
def decoder_for_gpt3(input, max_length, temperature=0, engine="text-davinci-003"):
    # The GPT-3 API allows each user at most 60 requests per minute, so sleep briefly between calls.
    if engine == "gpt-3.5-turbo":
        time.sleep(1)
        response = openai.ChatCompletion.create(
            model=engine,
            messages=[
                #{"role": "system", "content": "You need to answer commonsense questions."},
                {"role": "user", "content": input}
            ],
            max_tokens=max_length,
            temperature=temperature,
            stop=None
        )
        response = response["choices"][0]["message"]["content"]

    else:
        time.sleep(1)
        response = openai.Completion.create(
            model=engine,
            prompt=input,
            max_tokens=max_length,
            stop=None,
            temperature=temperature
        )
        response = response["choices"][0]["text"]
    return response

# Sampling decoder for self-consistency: draws n completions per prompt.
def decoder_for_gpt3_consistency(input, max_length, temp=0.7, n=5, engine="text-davinci-003"):
    # The GPT-3 API allows each user at most 60 requests per minute, so sleep briefly between calls.
    if engine == "gpt-3.5-turbo":
        time.sleep(1)
        responses = openai.ChatCompletion.create(
            model=engine,
            messages=[
                {"role": "user", "content": input}
            ],
            max_tokens=max_length,
            temperature=temp,
            top_p=1,
            n=n,  # use the requested number of samples instead of a hard-coded 5
            stop=["\n"],
        )
        responses = [responses["choices"][i]["message"]["content"] for i in range(n)]
    else:
        time.sleep(1)
        responses = openai.Completion.create(
            model=engine,
            prompt=input,
            max_tokens=max_length,
            temperature=temp,
            stop=["\n"],
            n=n,  # use the requested number of samples instead of a hard-coded 5
            logprobs=5,
            top_p=1,
        )
        responses = [responses["choices"][i]["text"] for i in range(n)]

    return responses
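
# Illustrative helper, a sketch of how the sampled completions are typically
# aggregated for self-consistency (this helper is an assumption, not defined
# elsewhere in this file): take a majority vote over the extracted answers.
def majority_vote(answers):
    from collections import Counter
    # Return the most frequent answer string among the sampled completions.
    return Counter(answers).most_common(1)[0][0] if answers else ""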

def zero_shot(question):
    input = question + " Among A through E, the answer is"
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant that answer commonsense questions."},
            {"role": "user", "content": input}
            ]
        )
    response = response["choices"][0]["message"]["content"]
    return response
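
# Minimal usage sketch, assuming the legacy openai<1.0 SDK and an
# OPENAI_API_KEY environment variable; the demo question below is
# illustrative only and not part of any dataset used here.
if __name__ == "__main__":
    import os
    openai.api_key = os.environ.get("OPENAI_API_KEY")
    demo_question = (
        "Where would you most likely find a calculator? "
        "Answer Choices: (A) classroom (B) kitchen (C) forest (D) ocean (E) garage"
    )
    print(zero_shot(demo_question))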