christopher committed
Commit 9c44dc5 · 1 Parent(s): 42dd69b

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -31,7 +31,7 @@ def get_first_subword(word):
     except:
         return tokenizer(word, add_special_tokens=False)['input_ids'][0]
 
- def search(token_to_lookup, num_neighbors=500):
+ def search(token_to_lookup, num_neighbors):
     i = get_first_subword(token_to_lookup)
     _ , I = index_L2_unnormalized.search(unnormalized_input_embeddings[i:i+1], num_neighbors)
     hits = lookup_table.take(I[0])
@@ -43,8 +43,8 @@ def search(token_to_lookup, num_neighbors=500):
 iface = gr.Interface(
     fn=search,
 
-    #inputs=[gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."), gr.Number(value=50, label="number of neighbors")],
-    inputs=gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
+    inputs=[gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."), gr.Number(value=50, label="number of neighbors")],
+    #inputs=gr.Textbox(lines=1, label="Vocabulary Token", placeholder="Enter token..."),
     outputs=[gr.Textbox(label="Nearest tokens"), gr.Textbox(label="Nearest subwords")],
     examples=[
         ["##logy"],