import json
import os

import requests
# Client ID from the IMS console
client_id = 'MDSR_Firefall'
# Client secret from the IMS console
client_secret = 's8e-8CGebu-kO3Vt_ICCNzQU8sCVYCHqcuFq'
# Permanent auth code: IMS console -> Service Tokens -> permanent auth token
permanent_auth_code = 'eyJhbGciOiJSUzI1NiIsIng1dSI6Imltc19uYTEtc3RnMS1rZXktcGFjLTEuY2VyIiwia2lkIjoiaW1zX25hMS1zdGcxLWtleS1wYWMtMSIsIml0dCI6InBhYyJ9.eyJpZCI6Ik1EU1JfRmlyZWZhbGxfc3RnIiwidHlwZSI6ImF1dGhvcml6YXRpb25fY29kZSIsImNsaWVudF9pZCI6Ik1EU1JfRmlyZWZhbGwiLCJ1c2VyX2lkIjoiTURTUl9GaXJlZmFsbEBBZG9iZUlEIiwiYXMiOiJpbXMtbmExLXN0ZzEiLCJvdG8iOmZhbHNlLCJjcmVhdGVkX2F0IjoiMTY4MTE0NTIxNDk1MCIsInNjb3BlIjoic3lzdGVtIn0.Yoz7IPhmIBV2uNKl1CJJ9rJ0HmvDBQFbh0AihlHdsOa1E3yBs7WB9ilTCUVodifg8gh1yw4QRllV1NKS2RYeiGxQU7rXAF7SEnH_X_Tqdl735PBnBFL8sW_x76dzmT6MZIzynz8Ywu57qztvFnHoLMfJ7HsNt7rkOqF3IZByOinxyJzRTwMfygHSKjoQx6A4S7LbuQWjlqDbM9RaeCcakMEqGvSKqkLQvtMg40ZQYSNELoFtbATfwuVrHWOglAQS4A2FR24ziop137imu4HrTr-syDYki8VWV27WuGGo632_K2vJwqbaYjZvyrtsuBLH3fGGgXgyM5EA_Jk_lcMFog'
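# Optional (not part of the original script): prefer reading the credentials from
# environment variables, falling back to the hardcoded values above. The variable
# names FIREFALL_CLIENT_ID / FIREFALL_CLIENT_SECRET / FIREFALL_AUTH_CODE are
# illustrative placeholders, not an established convention.
client_id = os.environ.get('FIREFALL_CLIENT_ID', client_id)
client_secret = os.environ.get('FIREFALL_CLIENT_SECRET', client_secret)
permanent_auth_code = os.environ.get('FIREFALL_AUTH_CODE', permanent_auth_code)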
ims_url = 'https://ims-na1-stg1.adobelogin.com/ims/token/v2'
firefall_client_id = 'MDSR_Firefall'
# The gateway headers below reuse the IMS client ID as both the org ID and the API key.
ims_org_id = client_id
api_key = client_id
azure_url = 'https://firefall-stage.adobe.io/v1/completions'
def get_openai_response(azure_url, ims_org_id, api_key, temp_auth_token, json_data):
    """POST the completion payload to the Firefall gateway and return the parsed JSON response."""
    headers = {
        'x-gw-ims-org-id': ims_org_id,
        'x-api-key': api_key,
        'Authorization': f'Bearer {temp_auth_token}',
        'Content-Type': 'application/json',
    }
    response = requests.post(azure_url, headers=headers, json=json_data)
    return json.loads(response.text)
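# Optional hardened variant (an assumption, not part of the original flow): the same
# POST with a request timeout and an explicit HTTP status check, using the standard
# requests timeout parameter and Response.raise_for_status().
def get_openai_response_strict(azure_url, ims_org_id, api_key, temp_auth_token, json_data, timeout=60):
    headers = {
        'x-gw-ims-org-id': ims_org_id,
        'x-api-key': api_key,
        'Authorization': f'Bearer {temp_auth_token}',
        'Content-Type': 'application/json',
    }
    response = requests.post(azure_url, headers=headers, json=json_data, timeout=timeout)
    response.raise_for_status()  # raise on 4xx/5xx instead of parsing an error body
    return response.json()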
def get_temp_auth_token(ims_url, client_id, client_secret, permanent_auth_code):
    """Exchange the permanent authorization code for a short-lived IMS access token."""
    params = {
        'client_id': client_id,
        'client_secret': client_secret,
        'code': permanent_auth_code,
        'grant_type': 'authorization_code',
    }
    response = requests.post(ims_url, params=params)
    return json.loads(response.text)
response = get_temp_auth_token(ims_url, client_id, client_secret, permanent_auth_code)
# print(response)
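# Optional sanity check (assumption: a failed exchange returns a body without an
# 'access_token' field, e.g. an error payload); fail early with the raw response.
if 'access_token' not in response:
    raise RuntimeError(f'IMS token exchange failed: {response}')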
temp_auth_token = response['access_token']
query = """hello world!"""
json_data = {
    "dialogue": {
        "question": query
    },
    "llm_metadata": {
        "model_name": "gpt-4",
        "temperature": 0.0,
        "max_tokens": 8071,
        "top_p": 1.0,
        "frequency_penalty": 0,
        "presence_penalty": 0,
        "n": 1,
        "llm_type": "azure_chat_openai"
    }
}
openai_response = get_openai_response(azure_url, ims_org_id, api_key, temp_auth_token, json_data)
# print("OpenAI Response:", openai_response)
print("OpenAI Response:", openai_response['generations'][0][0]['text'])