bambadij committed
Commit 23e260e · 1 Parent(s): 7cfe572
Files changed (3)
  1. () +41 -0
  2. app.py +2 -3
  3. requirements.txt +1 -0
() ADDED
@@ -0,0 +1,41 @@
+ pick fe6ac59 initial commit
+ pick 7cfe572 Add application file
+ pick 741ded9 Add application file
+
+
+
+
+
+
+
+
+ # Rebase 9419c38 onto d215d22 (10 commands)
+ #
+ # Commands:
+ # p, pick <commit> = use commit
+ # r, reword <commit> = use commit, but edit the commit message
+ # e, edit <commit> = use commit, but stop for amending
+ # s, squash <commit> = use commit, but meld into previous commit
+ # f, fixup [-C | -c] <commit> = like "squash" but keep only the previous
+ #                    commit's log message, unless -C is used, in which case
+ #                    keep only this commit's message; -c is same as -C but
+ #                    opens the editor
+ # x, exec <command> = run command (the rest of the line) using shell
+ # b, break = stop here (continue rebase later with 'git rebase --continue')
+ # d, drop <commit> = remove commit
+ # l, label <label> = label current HEAD with a name
+ # t, reset <label> = reset HEAD to a label
+ # m, merge [-C <commit> | -c <commit>] <label> [# <oneline>]
+ #         create a merge commit using the original merge commit's
+ #         message (or the oneline, if no original merge commit was
+ #         specified); use -c <commit> to reword the commit message
+ # u, update-ref <ref> = track a placeholder for the <ref> to be updated
+ #                       to this position in the new commits. The <ref> is
+ #                       updated at the end of the rebase
+ #
+ # These lines can be re-ordered; they are executed from top to bottom.
+ #
+ # If you remove a line here THAT COMMIT WILL BE LOST.
+ #
+ # However, if you remove everything, the rebase will be aborted.
+ #
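The added file above appears to be a git interactive-rebase todo list; its comments describe the available rebase commands. As a purely hypothetical illustration of those commands (not part of this commit), the duplicate "Add application file" commits could be combined by changing the third pick to squash before saving the todo list:

pick fe6ac59 initial commit
pick 7cfe572 Add application file
# meld the duplicate commit into the previous one; both messages open in the editor
squash 741ded9 Add application file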
app.py CHANGED
@@ -2,10 +2,9 @@ import gradio as gr
  from transformers import AutoModelForCausalLM, AutoTokenizer
 
  # Configuration du modèle et du tokenizer
- access_token ="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"
  # Load model directly
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-Nemo-Instruct-2407",token=access_token)
- model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-Nemo-Instruct-2407",token=access_token)
+ tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-Nemo-Instruct-2407")
+ model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-Nemo-Instruct-2407")
 
 
  default_prompt = """Bonjour,
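The change above removes the hard-coded Hugging Face access token and loads the model without authentication. If the model repository requires an authenticated download, a common alternative (not part of this commit) is to read the token from an environment variable such as a Space secret; a minimal sketch, assuming a secret named HF_TOKEN is configured:

import os
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical alternative: take the token from the environment instead of
# hard-coding it in app.py (HF_TOKEN is an assumed secret name).
access_token = os.environ.get("HF_TOKEN")

tokenizer = AutoTokenizer.from_pretrained(
    "mistralai/Mistral-Nemo-Instruct-2407", token=access_token
)
model = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mistral-Nemo-Instruct-2407", token=access_token
)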
requirements.txt ADDED
@@ -0,0 +1 @@
+ transformers==4.44.1