NoaiGPT committed on
Commit d008ab1 · 1 Parent(s): 17b0e6e
Files changed (1)
  1. app.py +11 -10
app.py CHANGED
@@ -52,9 +52,8 @@
  # import dependencies
  import gradio as gr
  from openai import OpenAI
- import os
- import re
- from transformers import pipeline, DistilBertForSequenceClassification, DistilBertTokenizerFast
+ import torch
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification
 
  # define the openai key
  api_key = "sk-proj-UCoZZMs4MyfyHwXdHjT8T3BlbkFJjYkSZyPfIPNqXfXwoekm"
@@ -65,16 +64,18 @@ client = OpenAI(api_key = api_key)
  # finetuned model instance
  finetuned_model = "ft:gpt-3.5-turbo-0125:personal::9qGC8cwZ"
 
- # Load the AI detection model
- model_name = "tommyliphys/ai-detector-distilbert"
- model = DistilBertForSequenceClassification.from_pretrained(model_name)
- tokenizer = DistilBertTokenizerFast.from_pretrained(model_name)
-
- pipe = pipeline("text-classification", model=model, tokenizer=tokenizer)
+ # Load model directly
+ tokenizer = AutoTokenizer.from_pretrained("tommyliphys/ai-detector-distilbert")
+ model = AutoModelForSequenceClassification.from_pretrained("tommyliphys/ai-detector-distilbert")
 
  # Define the function to get predictions
  def get_prediction(text):
-     return pipe(text)[0]
+     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True, max_length=512)
+     with torch.no_grad():
+         outputs = model(**inputs)
+     probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
+     ai_probability = probabilities[0][1].item()  # Assuming 1 is the index for "AI"
+     return {"label": "AI" if ai_probability > 0.5 else "Human", "score": ai_probability}
 
  # function to humanize the text
  def humanize_text(AI_text):
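
For reference, the new get_prediction path drops the transformers pipeline and instead tokenizes the input, runs a forward pass under torch.no_grad(), applies a softmax to the logits, and maps the probability at index 1 to an "AI" label. A minimal standalone sketch of that flow, runnable outside the Gradio app (the example text is illustrative, and the assumption that index 1 is the "AI" class follows the commit's own comment):

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Load the detector checkpoint referenced in the commit
tokenizer = AutoTokenizer.from_pretrained("tommyliphys/ai-detector-distilbert")
model = AutoModelForSequenceClassification.from_pretrained("tommyliphys/ai-detector-distilbert")
model.eval()

def get_prediction(text):
    # Tokenize with truncation/padding up to DistilBERT's 512-token limit
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True, max_length=512)
    with torch.no_grad():
        outputs = model(**inputs)
    # Softmax over the logits gives class probabilities; index 1 is assumed to be "AI"
    probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
    ai_probability = probabilities[0][1].item()
    return {"label": "AI" if ai_probability > 0.5 else "Human", "score": ai_probability}

# Prints a dict with "label" and "score" keys for the given text
print(get_prediction("Sample text to score."))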