Elaineyy committed on
Commit
bb40de3
·
verified ·
1 Parent(s): 3e00208

Update server.py

Browse files
Files changed (1) hide show
  1. server.py +52 -7
server.py CHANGED
@@ -1,20 +1,65 @@
1
- from fastapi import FastAPI
2
  from pydantic import BaseModel
 
 
3
  import subprocess
 
 
4
 
5
  app = FastAPI()
6
 
 
 
 
 
 
7
  class CodeRequest(BaseModel):
8
  user_story: str
9
 
 
 
 
10
  @app.post("/generate-code")
11
  def generate_code(request: CodeRequest):
12
- result = subprocess.run(
13
- ["ollama", "run", "deepseek-coder-v2", request.user_story],
14
- capture_output=True,
15
- text=True
16
- )
17
- return {"generated_code": result.stdout.strip()}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
  if __name__ == "__main__":
20
  import uvicorn
 
1
import os
import subprocess
import sys
import tempfile

import torch
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer
8
 
9
app = FastAPI()

# Load DeepSeek-Coder-V2-Base once at startup so every request reuses the
# same model instance instead of reloading per call.
# NOTE: DeepSeek-V2 checkpoints ship custom modeling code on the Hub, so
# from_pretrained needs trust_remote_code=True to load them.
model_name = "deepseek-ai/DeepSeek-Coder-V2-Base"
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # half precision halves memory footprint
    device_map="auto",          # place/shard across available devices
    trust_remote_code=True,
)
16
class CodeRequest(BaseModel):
    # Request body for POST /generate-code.
    # user_story: plain-text description of the feature to generate code for.
    user_story: str
18
 
19
class TestRequest(BaseModel):
    # Request body for POST /test-code.
    # code: Python source text to be written to a temp file and run via pytest.
    code: str
21
+
22
@app.post("/generate-code")
def generate_code(request: CodeRequest):
    """Generate code for the given user story with the loaded model.

    Returns {"generated_code": <str>} containing only the newly generated
    text: the prompt tokens are stripped before decoding, so the caller does
    not get its own prompt echoed back.
    """
    prompt = f"Generate structured code for: {request.user_story}"

    device = "cuda" if torch.cuda.is_available() else "cpu"
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    # max_new_tokens bounds the *generated* text; the previous max_length=300
    # counted the prompt too, so long user stories left almost no room for
    # the model to generate anything.
    output = model.generate(**inputs, max_new_tokens=300)
    # Decode only the completion, skipping the prompt tokens at the front.
    prompt_len = inputs["input_ids"].shape[1]
    generated_code = tokenizer.decode(output[0][prompt_len:], skip_special_tokens=True)

    return {"generated_code": generated_code}
32
+
33
@app.post("/test-code")
def test_code(request: TestRequest):
    """Run pytest against the submitted code and report pass/fail.

    The code is written to a temporary .py file, pytest is invoked on it,
    and the file is always removed afterwards (previously the file leaked
    whenever subprocess.run itself raised, e.g. pytest not installed).

    Returns:
        {"test_status": ...} and, on failure, a "details" key with stderr.

    Raises:
        HTTPException(500) wrapping any unexpected error.
    """
    try:
        # delete=False so the file survives the `with` and pytest can open it
        # by path; the redundant explicit close() of the original is gone —
        # the context manager closes the handle.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".py") as temp_file:
            temp_file.write(request.code.encode())
            path = temp_file.name

        try:
            result = subprocess.run(["pytest", path], capture_output=True, text=True)
        finally:
            # Always clean up, even if pytest fails to launch.
            os.unlink(path)

        if result.returncode == 0:
            return {"test_status": "All tests passed!"}
        return {"test_status": "Test failed!", "details": result.stderr}

    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
51
+
52
@app.get("/execute-code")
def execute_code():
    """Execute a fixed sample snippet in a child interpreter.

    Uses sys.executable so the snippet runs under the same interpreter that
    serves the app — a bare "python3" may not exist on PATH (e.g. Windows,
    some containers).

    Returns:
        {"status": ..., "output": <stdout>} on success, or
        {"status": ..., "error": <message>} if the subprocess cannot start.
    """
    sample_code = "print('Hello from AI-generated code!')"

    try:
        # List-form argv (shell=False) — no shell injection surface.
        result = subprocess.run(
            [sys.executable, "-c", sample_code],
            capture_output=True,
            text=True,
        )
        return {"status": "Execution successful!", "output": result.stdout}

    except Exception as e:
        return {"status": "Execution failed!", "error": str(e)}
63
 
64
  if __name__ == "__main__":
65
  import uvicorn