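# Sample inference script for govtech/lionguard-2: embeds the input texts with
# OpenAI's text-embedding-3-large, then scores them with the LionGuard 2
# moderation classifier loaded from the Hugging Face Hub.
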
import json
import os
import sys
import numpy as np
from openai import OpenAI
from transformers import AutoModel
def infer(texts):
    # Load the model directly from the Hugging Face Hub
    model = AutoModel.from_pretrained("govtech/lionguard-2", trust_remote_code=True)

    # Get embeddings (users must supply their own OpenAI API key)
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    response = client.embeddings.create(input=texts, model="text-embedding-3-large")
    embeddings = np.array([data.embedding for data in response.data])

    # Run inference
    results = model.predict(embeddings)
    return results
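
# Note: judging from how `results` is consumed below, it appears to be a
# mapping from category name to a list of per-text scores, i.e.
# results[category][i] is the score of texts[i] for that category. The exact
# category names come from the govtech/lionguard-2 model card.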

if __name__ == "__main__":
    # Load the data
    try:
        input_data = sys.argv[1]
        batch_text = json.loads(input_data)
        print("Using provided input texts")
    except (json.JSONDecodeError, IndexError) as e:
        print(f"Error parsing input data: {e}")
        print("Falling back to default sample texts")
        batch_text = ["Eh you damn stupid lah!", "Have a nice day :)"]

    # Generate the scores and predictions
    results = infer(batch_text)
    for i, text in enumerate(batch_text):
        print(f"Text: '{text}'")
        for category in results:
            print(f"[Text {i+1}] {category} score: {results[category][i]:.4f}")
        print("---------------------------------------------")