sayyedAhmed committed on
Commit
eae485e
·
1 Parent(s): a786e13
Files changed (1) hide show
  1. inference.py +17 -21
inference.py CHANGED
@@ -1,28 +1,24 @@
1
- from huggingface_hub import InferenceClient
2
- from dotenv import load_dotenv
3
  import os
4
 
5
- # Load the .env file to get environment variables
6
- load_dotenv()
7
 
8
- # Get the API key from the environment variable
9
- api_key = os.getenv("HF_API_KEY")
10
 
11
- # Initialize the InferenceClient with your API key
12
- client = InferenceClient(api_key="hf_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
 
 
13
 
14
- # Define the model path or model name on Hugging Face
15
- model_name = "sayyedAhmed/Crisis_Severity_Predictor_LSTM"
 
16
 
17
- # Define the input data (make sure this matches your model's expected input format)
18
- # For example, this could be a list of features for one instance
19
- input_data = [[1.23, 4.56, 7.89, 10.11]] # Adjust this based on your model's input format
20
 
21
- # Request inference (you can customize this request as needed, such as adding more parameters)
22
- predictions = client.predict(
23
- model=model_name,
24
- inputs=input_data
25
- )
26
-
27
- # Print out the predictions
28
- print("Predictions:", predictions)
 
1
+ import requests
2
+ import json
3
  import os
4
 
5
+ # Your Hugging Face model URL
6
+ API_URL = "sayyedAhmed/Crisis_Severity_Predictor_LSTM" # Replace with your model's URL
7
 
8
+ # Load your Hugging Face API token
9
+ API_KEY = os.getenv("HF_API_KEY") # Ensure the API key is stored in the environment or replace with the actual key
10
 
11
+ headers = {
12
+ "Authorization": f"Bearer {API_KEY}",
13
+ "Content-Type": "application/json"
14
+ }
15
 
16
+ payload = {
17
+ "inputs": "Your test input here" # Replace this with the actual input for your model
18
+ }
19
 
20
+ # Make the POST request to Hugging Face Inference API
21
+ response = requests.post(API_URL, headers=headers, json=payload)
 
22
 
23
+ # Print the response (the predictions)
24
+ print(response.json())