karanzrk committed
Commit 3ccc1d1 · 1 Parent(s): 03a21d4

Update space
Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -7,7 +7,7 @@ For more information on `huggingface_hub` Inference API support, please check th
 # client = InferenceClient("karanzrk/bert-Causal-QA")
 
 from transformers import pipeline
-generator = pipeline('text-generation', model = 'karanzrk/bert-Causal-QA', tokenizer="bert-base-uncased")
+generator = pipeline('text-generation', model = 'karanzrk/bert-Causal-QA', tokenizer="bert-base-uncased", max_length = 128)
 # generator("Hello, I'm a language model", max_length = 30, num_return_sequences=3)
 
 
@@ -64,6 +64,7 @@ def inference(text):
 # classifier = pipeline("text-classification", model="karanzrk/essayl0")
 text = "Question: " + text
 output = generator(text)
+answer = output["generated_text"]
 return output
 
 # launcher = gr.Interface(
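
For reference, a minimal sketch of the inference path this commit touches, assuming the standard `transformers` text-generation pipeline API. In current `transformers` releases a text-generation pipeline called on a single string typically returns a list of dicts such as `[{"generated_text": "..."}]`, so the `output[0]["generated_text"]` indexing below is an assumption about that return shape and differs slightly from the committed line, which indexes the list with a string key directly.

from transformers import pipeline

# Mirrors the updated pipeline construction from the diff; max_length caps
# the generated sequence length at 128 tokens.
generator = pipeline(
    "text-generation",
    model="karanzrk/bert-Causal-QA",
    tokenizer="bert-base-uncased",
    max_length=128,
)

def inference(text):
    # Prefix the prompt as in the committed code.
    text = "Question: " + text
    output = generator(text)
    # Assumption: the pipeline returns a list of dicts, so the generated
    # string is read from the first element rather than from the list itself.
    answer = output[0]["generated_text"]
    return answer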