Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
|
|
2 |
from huggingface_hub import InferenceClient
|
3 |
import os
|
4 |
from mitreattack.stix20 import MitreAttackData
|
5 |
-
from descriptions import descriptions
|
6 |
|
7 |
# Chemins des fichiers JSON
|
8 |
ics_attack_path = 'ics-attack.json'
|
@@ -19,6 +19,9 @@ techniques_str = "\n".join([f"{technique['name']} ({mitre_attack_data.get_attack
|
|
19 |
|
20 |
client = InferenceClient(model='mistralai/Mixtral-8x7B-Instruct-v0.1')
|
21 |
|
|
|
|
|
|
|
22 |
|
23 |
def respond(
|
24 |
message,
|
@@ -53,16 +56,32 @@ def respond(
|
|
53 |
response += token
|
54 |
yield response
|
55 |
|
56 |
-
|
57 |
demo = gr.ChatInterface(
|
58 |
respond,
|
59 |
additional_inputs=[
|
60 |
-
gr.Textbox(
|
|
|
61 |
gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens"),
|
62 |
gr.Slider(minimum=0.1, maximum=1.0, value=0.1, step=0.1, label="Temperature"),
|
63 |
gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
|
64 |
],
|
|
|
|
|
65 |
)
|
66 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
67 |
if __name__ == "__main__":
|
68 |
demo.launch()
|
|
|
2 |
from huggingface_hub import InferenceClient
|
3 |
import os
|
4 |
from mitreattack.stix20 import MitreAttackData
|
5 |
+
from descriptions import descriptions  # Make sure descriptions.py is in the same directory
|
6 |
|
7 |
# Paths to the JSON data files
|
8 |
# Path to the MITRE ATT&CK for ICS STIX bundle (relative to the working directory).
ics_attack_path = 'ics-attack.json'
|
|
|
19 |
|
20 |
# Shared Hugging Face Inference API client for the Mixtral-8x7B instruct model.
client = InferenceClient(model='mistralai/Mixtral-8x7B-Instruct-v0.1')
|
21 |
|
22 |
+
def generate_system_message(log_input):
    """Build the Mixtral ``[INST]`` prompt asking which TTP the logs match.

    Combines the module-level ``techniques_str`` listing with the text
    produced by ``descriptions`` for *log_input*, and returns the full
    instruction string expected by the inference client.
    """
    desc = descriptions(log_input)
    prompt = (
        "<s>[INST] Given these TTPs: "
        f"{techniques_str}\n\n and here are the descriptions: {desc}\n\n"
        "Figure out which technique is used in these logs and respond "
        "in bullet points and nothing else.[/INST]"
    )
    return prompt
|
25 |
|
26 |
def respond(
|
27 |
message,
|
|
|
56 |
response += token
|
57 |
yield response
|
58 |
|
|
|
59 |
# Chat UI: Gradio passes the chat message plus each additional input
# (in list order) as extra positional arguments to `respond`.
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(label="Log Input", placeholder="Enter log here..."),
        # NOTE(review): initialised empty and non-interactive; the module-level
        # code further down tries to push a generated prompt into this box.
        # Verify that `respond`'s signature expects the inputs in this order.
        gr.Textbox(label="System message", value="", interactive=False),
        gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.1, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
    title="TTP Detection Interface",
    description="Enter logs to detect TTPs using the model.",
)
|
71 |
|
72 |
+
# --- Keep the read-only "System message" box in sync with the log input ---
# NOTE(review): the previous version called
# ``demo.additional_inputs[1].update(value=gr.update(value=...))`` at module
# level and bound ``.change(fn)`` with no inputs/outputs. In Gradio that
# neither updates the UI nor delivers the log text to the handler: event
# handlers must *return* the new value, the listener must declare its
# inputs/outputs, and listeners must be attached inside the Blocks context.

def update_system_message(log_input):
    """Event handler: return the system prompt generated for *log_input*."""
    return generate_system_message(log_input)


def on_log_input_change(log_input):
    """Alias handler kept for the ``.change`` binding below."""
    return generate_system_message(log_input)


with demo:  # re-enter the ChatInterface's Blocks context to attach events
    # Seed the system-message box with the prompt for an empty log.
    # (presumably safe at import time — `descriptions("")` is called here;
    # TODO confirm it tolerates an empty string)
    demo.additional_inputs[1].value = generate_system_message("")
    # Regenerate the prompt whenever the log input changes.
    demo.additional_inputs[0].change(
        on_log_input_change,
        inputs=demo.additional_inputs[0],
        outputs=demo.additional_inputs[1],
    )
|
85 |
+
|
86 |
# Launch the Gradio app only when executed as a script (not when imported).
if __name__ == "__main__":
    demo.launch()
|