NoaiGPT committed on
Commit 9582224 · 1 Parent(s): d5c5375
Files changed (2)
  1. app.py +8 -15
  2. requirements.txt +3 -1
app.py CHANGED
```diff
@@ -52,8 +52,9 @@
  # import dependencies
  import gradio as gr
  from openai import OpenAI
- import torch
- from transformers import AutoTokenizer, AutoModelForSequenceClassification, TFAutoModelForSequenceClassification
+ import os
+ import re
+ from transformers import pipeline, DistilBertForSequenceClassification, DistilBertTokenizerFast
 
  # define the openai key
  api_key = "sk-proj-UCoZZMs4MyfyHwXdHjT8T3BlbkFJjYkSZyPfIPNqXfXwoekm"
@@ -64,24 +65,16 @@ client = OpenAI(api_key = api_key)
  # finetuned model instance
  finetuned_model = "ft:gpt-3.5-turbo-0125:personal::9qGC8cwZ"
 
- # Load model directly
+ # Load the AI detection model
  model_name = "tommyliphys/ai-detector-distilbert"
- tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = DistilBertForSequenceClassification.from_pretrained(model_name)
+ tokenizer = DistilBertTokenizerFast.from_pretrained(model_name)
 
- # Load TensorFlow model
- tf_model = TFAutoModelForSequenceClassification.from_pretrained(model_name, from_tf=True)
-
- # Convert to PyTorch model
- model = AutoModelForSequenceClassification.from_pretrained(model_name, from_tf=True)
+ pipe = pipeline("text-classification", model=model, tokenizer=tokenizer)
 
  # Define the function to get predictions
  def get_prediction(text):
-     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True, max_length=512)
-     with torch.no_grad():
-         outputs = model(**inputs)
-     probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
-     ai_probability = probabilities[0][1].item()  # Assuming 1 is the index for "AI"
-     return {"label": "AI" if ai_probability > 0.5 else "Human", "score": ai_probability}
+     return pipe(text)[0]
 
  # function to humanize the text
  def humanize_text(AI_text):
```
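The change swaps the hand-rolled inference path (tokenizer call, torch.no_grad, softmax over the logits) for a Hugging Face text-classification pipeline, so get_prediction now returns the pipeline's raw top-class dict rather than the old {"label": "AI" or "Human", "score": P(AI)} shape. Below is a minimal sketch of how a caller could keep the old contract on top of the new pipeline; the literal "AI"/"Human" label strings are an assumption carried over from the removed code, since the real names come from the checkpoint's id2label config.

```python
from transformers import pipeline

# Sketch only, not part of this commit: load the detector the way the new
# app.py does, then normalize the pipeline output back to the old return shape.
pipe = pipeline("text-classification", model="tommyliphys/ai-detector-distilbert")

def get_prediction(text: str) -> dict:
    # pipe() returns e.g. [{"label": "AI", "score": 0.97}] for the top class.
    result = pipe(text, truncation=True, max_length=512)[0]
    # Assumption: the checkpoint's labels are literally "AI" and "Human".
    ai_probability = result["score"] if result["label"] == "AI" else 1.0 - result["score"]
    return {"label": "AI" if ai_probability > 0.5 else "Human", "score": ai_probability}
```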
requirements.txt CHANGED
```diff
@@ -1,3 +1,5 @@
  openai
  transformers
- torch
+ torch
+ tensorflow
+ tf-keras
```
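requirements.txt keeps tensorflow and tf-keras alongside torch even though the new loader uses the PyTorch DistilBertForSequenceClassification class. A plausible reason, suggested by the from_tf=True calls removed from app.py, is that the checkpoint ships TensorFlow weights: converting them into a PyTorch model still needs TensorFlow at load time, and tf-keras supplies the Keras 2 compatibility layer that recent transformers releases ask for when Keras 3 is installed. A hedged sketch of that conversion path, assuming TF-only weights:

```python
from transformers import DistilBertForSequenceClassification, DistilBertTokenizerFast

# Sketch under an assumption: if the repo publishes only TF (.h5) weights, the
# PyTorch class can still load them, but only with from_tf=True and a working
# tensorflow / tf-keras install, which is what the added requirements provide.
model_name = "tommyliphys/ai-detector-distilbert"
model = DistilBertForSequenceClassification.from_pretrained(model_name, from_tf=True)
tokenizer = DistilBertTokenizerFast.from_pretrained(model_name)
```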