Update src/terminal.js
Browse files- src/terminal.js +37 -1
src/terminal.js
CHANGED
@@ -19,12 +19,48 @@ export const setSharedTerminalMode = (useSharedTerminal) => {
|
|
19 |
sharedPtyProcess = spawnShell();
|
20 |
}
|
21 |
};
|
22 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
23 |
export const handleTerminalConnection = (ws) => {
|
24 |
let ptyProcess = sharedTerminalMode ? sharedPtyProcess : spawnShell();
|
25 |
|
26 |
ws.on('message', command => {
|
27 |
const processedCommand = commandProcessor(command);
|
|
|
|
|
|
|
|
|
|
|
28 |
ptyProcess.write(processedCommand);
|
29 |
});
|
30 |
|
|
|
19 |
sharedPtyProcess = spawnShell();
|
20 |
}
|
21 |
};
|
22 |
+
// IA: minimal chat backend over the Pollinations OpenAI-compatible API.
// It mimics the small subset of the node-pty interface this file uses:
// write(data) sends a user message; listeners registered via on('data', cb)
// receive the assistant's reply text; kill() is a compatible no-op.
class IA {
  constructor() {
    // Conversation history sent with every request (OpenAI chat format).
    this.messages = [];
    // Event name -> array of callbacks (tiny EventEmitter substitute).
    this.listeners = {};
  }

  // Send `data` as a user message and emit the assistant reply on 'data'.
  write(data) {
    // Arrays have no `.append` — the original threw a TypeError here.
    this.messages.push({ role: 'user', content: data });
    fetch('https://text.pollinations.ai/openai', {
      // The original omitted method/headers; fetch defaults to GET,
      // which cannot carry a JSON body to this endpoint.
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        messages: this.messages,
        private: true,
        model: 'openai-large',
      }),
    })
      .then((resp) => resp.json())
      .then((respuesta) => {
        const reply = respuesta.choices[0].message;
        this.messages.push(reply);
        // Original read `messages.length` — an undefined global; use the
        // reply we just received directly.
        this.emit('data', reply.content);
      })
      .catch((err) => {
        // Surface API failures to listeners instead of leaving a
        // floating promise rejection.
        this.emit('data', `IA error: ${err.message}\r\n`);
      });
  }

  // Invoke every listener registered for `event` with `payload`.
  emit(event, payload) {
    // Original leaked `v` as an implicit global, iterated indices with
    // for...in, and called .apply on the index string (TypeError).
    const callbacks = this.listeners[event] ?? [];
    for (const cb of callbacks) {
      cb(payload);
    }
  }

  // Register `cb` to run whenever `event` is emitted.
  on(event, cb) {
    if (!this.listeners[event]) {
      this.listeners[event] = [];
    }
    this.listeners[event].push(cb);
  }

  // Interface-compatibility no-op so callers that kill() a pty can also
  // kill() an IA instance (e.g. when switching modes again).
  kill() {}
}
|
54 |
export const handleTerminalConnection = (ws) => {
|
55 |
let ptyProcess = sharedTerminalMode ? sharedPtyProcess : spawnShell();
|
56 |
|
57 |
ws.on('message', command => {
|
58 |
const processedCommand = commandProcessor(command);
|
59 |
+
if(processedCommand=="ia"){
|
60 |
+
ptyProcess.kill()
|
61 |
+
sharedPtyProcess = IA()
|
62 |
+
|
63 |
+
}
|
64 |
ptyProcess.write(processedCommand);
|
65 |
});
|
66 |
|