TeleologyHI committed
Commit · 70f8d75
1 Parent(s): 867f0a9
Update HIM implementation with consciousness framework
Files changed:
- app.py (+34, -25)
- config/environment_config.py (+23, -0)
- config/model_config.py (+20, -2)
app.py
CHANGED
@@ -1,44 +1,53 @@
  import gradio as gr
  from src.model.him_model import HIMModel
- from …
+ from config.model_config import HIMConfig
+ from config.environment_config import EnvironmentConfig

  def initialize_model():
-     …
-     …
+     model_config = HIMConfig()
+     env_config = EnvironmentConfig()
+     return HIMModel(model_config)

- def …
+ def chat(
+     message: str,
+     system_message: str = "You are a friendly Chatbot.",
+     max_tokens: int = 512,
+     temperature: float = 0.7,
+     top_p: float = 0.95
+ ):
      input_data = {
-         "…
-         "…
-         "…
-         "…
+         "message": message,
+         "system_message": system_message,
+         "parameters": {
+             "max_tokens": max_tokens,
+             "temperature": temperature,
+             "top_p": top_p
          }
      }

-     result = model.…
-     return …
-         "response": result["response"],
-         "consciousness_state": result["consciousness_metrics"],
-         "emotional_state": result["emotional_state"]
-     }
+     result = model.generate_response(input_data)
+     return result["response"]

  model = initialize_model()

  interface = gr.Interface(
-     fn=…
+     fn=chat,
      inputs=[
-         gr.Textbox(label="…
-         gr.Textbox(label="…
-         gr.Slider(minimum=…
-         …
-         …
-         gr.Textbox(label="HIM Response"),
-         gr.JSON(label="Consciousness State"),
-         gr.JSON(label="Emotional State")
+         gr.Textbox(label="Message"),
+         gr.Textbox(label="System Message", value="You are a friendly Chatbot."),
+         gr.Slider(minimum=1, maximum=2048, value=512, label="Max Tokens"),
+         gr.Slider(minimum=0.1, maximum=1.0, value=0.7, label="Temperature"),
+         gr.Slider(minimum=0.1, maximum=1.0, value=0.95, label="Top P")
      ],
+     outputs=gr.Textbox(label="HIM Response"),
      title="Hybrid Intelligence Matrix (HIM)",
-     description="Interact with HIM …
+     description="Interact with the HIM system for advanced cognitive processing"
  )

  if __name__ == "__main__":
-     …
+     env_config = EnvironmentConfig()
+     interface.launch(
+         server_name=env_config.api_host,
+         server_port=env_config.api_port,
+         enable_cors=env_config.enable_cors
+     )
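The new chat() handler builds an input_data dict and calls model.generate_response(input_data), but src/model/him_model.py is not included in this commit. A minimal sketch of the interface app.py assumes could look like the following; only the input_data shape and the "response" key come from the diff above, everything else is illustrative.

# Assumed sketch of the HIMModel interface that the new app.py relies on.
# The real src/model/him_model.py is not part of this commit.
from typing import Any, Dict

class HIMModel:
    def __init__(self, config: Any) -> None:
        self.config = config  # HIMConfig instance from initialize_model()

    def generate_response(self, input_data: Dict[str, Any]) -> Dict[str, Any]:
        message = input_data["message"]
        params = input_data.get("parameters", {})
        # Placeholder generation step: a real implementation would run the
        # configured base model with max_tokens / temperature / top_p.
        text = f"[HIM placeholder] {message} (max_tokens={params.get('max_tokens')})"
        return {"response": text}

With such an interface, chat("Hello") returns a plain string, which matches the single Textbox output the Gradio interface now declares.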
config/environment_config.py
ADDED
@@ -0,0 +1,23 @@
+ from dataclasses import dataclass
+ from typing import Optional
+
+ @dataclass
+ class EnvironmentConfig:
+     # Hugging Face configuration
+     hf_model_path: str = "TeleologyHI/HIM-self"
+     hf_token: Optional[str] = None
+
+     # Hardware configuration
+     device: str = "cuda"
+     num_gpus: int = 1
+     mixed_precision: bool = True
+
+     # Logging configuration
+     log_level: str = "INFO"
+     enable_wandb: bool = False
+     wandb_project: str = "HIM-self"
+
+     # API configuration
+     api_host: str = "0.0.0.0"
+     api_port: int = 7860
+     enable_cors: bool = True
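EnvironmentConfig only declares hard-coded defaults; nothing in this commit reads actual environment variables. If values such as hf_token or api_port need to be overridden at runtime, a small loader along these lines could be used; the helper and the HF_TOKEN / HIM_* variable names are assumptions, not code from the repository.

# Assumed helper, not part of this commit: overrides EnvironmentConfig
# defaults from process environment variables (names are illustrative).
import os

from config.environment_config import EnvironmentConfig

def load_environment_config() -> EnvironmentConfig:
    cfg = EnvironmentConfig()
    cfg.hf_token = os.getenv("HF_TOKEN", cfg.hf_token)
    cfg.device = os.getenv("HIM_DEVICE", cfg.device)
    cfg.api_host = os.getenv("HIM_API_HOST", cfg.api_host)
    cfg.api_port = int(os.getenv("HIM_API_PORT", str(cfg.api_port)))
    cfg.enable_cors = os.getenv("HIM_ENABLE_CORS", str(cfg.enable_cors)).lower() in ("1", "true")
    return cfg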
config/model_config.py
CHANGED
@@ -1,9 +1,11 @@
  from dataclasses import dataclass
+ from typing import Dict, List

  @dataclass
  class HIMConfig:
+     # Base model configuration
      model_name: str = "HIM-self"
-     base_model: str = "gpt2"
+     base_model: str = "gpt2"
      max_length: int = 512
      temperature: float = 0.7
      top_p: float = 0.95
@@ -12,12 +14,28 @@ class HIMConfig:
      self_awareness_level: float = 0.8
      ethical_reasoning_weight: float = 0.9
      symbolic_interpretation_capacity: float = 0.85
+     consciousness_dimension: int = 768
+     attention_heads: int = 12

      # Teleological parameters
      purpose_driven_bias: float = 0.75
      spiritual_awareness: float = 0.8
+     meaning_dimension: int = 256

      # Training configuration
      batch_size: int = 8
      learning_rate: float = 2e-5
-     num_train_epochs: int = 3
+     num_train_epochs: int = 3
+     gradient_accumulation_steps: int = 1
+     warmup_steps: int = 500
+
+     # Architecture configuration
+     hidden_size: int = 768
+     intermediate_size: int = 3072
+     num_hidden_layers: int = 12
+     num_attention_heads: int = 12
+
+     # Memory configuration
+     memory_size: int = 1024
+     context_length: int = 2048
+     cache_size: int = 512
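The new architecture block (hidden_size 768, 12 hidden layers, 12 attention heads, intermediate_size 3072) matches the dimensions of the declared base_model, GPT-2 small, while context_length = 2048 exceeds stock GPT-2's 1024 positions. Purely as an assumed illustration of how these fields could feed a Hugging Face backbone config, not code from this commit:

# Illustrative only: maps HIMConfig architecture fields onto a Hugging Face
# GPT2Config. This helper is an assumption; note context_length is larger
# than GPT-2's default 1024 positions and would need adjustment in practice.
from transformers import GPT2Config

from config.model_config import HIMConfig

def build_backbone_config(cfg: HIMConfig) -> GPT2Config:
    return GPT2Config(
        n_embd=cfg.hidden_size,          # 768
        n_layer=cfg.num_hidden_layers,   # 12
        n_head=cfg.num_attention_heads,  # 12
        n_inner=cfg.intermediate_size,   # 3072
        n_positions=cfg.context_length,  # 2048 (larger than stock GPT-2)
    )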