darshankr committed on
Commit 0075fd3 · verified · 1 Parent(s): 45a86ac

Update app.py

Files changed (1)
  1. app.py +23 -55
app.py CHANGED
@@ -1,52 +1,37 @@
  # app.py
  import streamlit as st
- from fastapi import FastAPI, HTTPException, Request
- from fastapi.responses import JSONResponse
- from pydantic import BaseModel
- from typing import List
  import torch
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
  from IndicTransToolkit import IndicProcessor
- import json
- from fastapi.middleware.cors import CORSMiddleware
- import uvicorn
+ from typing import List
+ import sys
  from starlette.applications import Starlette
- from starlette.routing import Mount, Route
+ from starlette.routing import Mount
  from starlette.staticfiles import StaticFiles
- import asyncio
  import nest_asyncio
+ from api import app

  # Enable nested event loops
  nest_asyncio.apply()

- # Initialize FastAPI
- app = FastAPI()
-
- # Add CORS middleware
- app.add_middleware(
-     CORSMiddleware,
-     allow_origins=["*"],
-     allow_credentials=True,
-     allow_methods=["*"],
-     allow_headers=["*"],
- )
-
- # Initialize models and processors
- model = AutoModelForSeq2SeqLM.from_pretrained(
-     "ai4bharat/indictrans2-en-indic-1B",
-     trust_remote_code=True
- )
- tokenizer = AutoTokenizer.from_pretrained(
-     "ai4bharat/indictrans2-en-indic-1B",
-     trust_remote_code=True
- )
- ip = IndicProcessor(inference=True)
- DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
- model = model.to(DEVICE)
+ # Initialize models and processors (lazy loading)
+ @st.cache_resource
+ def load_models():
+     model = AutoModelForSeq2SeqLM.from_pretrained(
+         "ai4bharat/indictrans2-en-indic-1B",
+         trust_remote_code=True
+     )
+     tokenizer = AutoTokenizer.from_pretrained(
+         "ai4bharat/indictrans2-en-indic-1B",
+         trust_remote_code=True
+     )
+     ip = IndicProcessor(inference=True)
+     DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
+     model = model.to(DEVICE)
+     return model, tokenizer, ip, DEVICE

- class TranslationRequest(BaseModel):
-     sentences: List[str]
-     target_lang: str
+ # Global variables for models
+ model, tokenizer, ip, DEVICE = load_models()

  def translate_text(sentences: List[str], target_lang: str):
      try:
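Note: the body of `translate_text` falls outside the diff context, so it is not shown in this commit view. For orientation only, a typical IndicTrans2 inference pass with `IndicProcessor` looks roughly like the sketch below; the generation parameters, source-language handling, and return shape are assumptions, not code taken from this repository.

```python
# Illustrative sketch -- not the body elided from this diff.
# model, tokenizer, ip and DEVICE are the module-level objects returned by load_models().
# Assumes an English source ("eng_Latn") and FLORES-style target codes such as "hin_Deva".
def translate_text(sentences: List[str], target_lang: str):
    try:
        # Tag and normalise the batch for IndicTrans2.
        batch = ip.preprocess_batch(sentences, src_lang="eng_Latn", tgt_lang=target_lang)

        # Tokenise and move tensors to the model's device.
        inputs = tokenizer(batch, truncation=True, padding="longest", return_tensors="pt").to(DEVICE)

        # Generate translations without tracking gradients.
        with torch.no_grad():
            generated = model.generate(**inputs, max_length=256, num_beams=5, num_return_sequences=1)

        # Decode and undo the IndicTrans2 preprocessing (detokenisation, script handling).
        decoded = tokenizer.batch_decode(generated, skip_special_tokens=True)
        translations = ip.postprocess_batch(decoded, lang=target_lang)

        # The JSON-serialisable shape is a guess; the real function only needs to
        # return something JSONResponse can serialise.
        return {"translations": translations}
    except Exception as e:
        raise Exception(f"Translation failed: {str(e)}")
```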
@@ -90,23 +75,6 @@ def translate_text(sentences: List[str], target_lang: str):
      except Exception as e:
          raise Exception(f"Translation failed: {str(e)}")

- # FastAPI routes
- @app.get("/api/health")
- async def health_check():
-     return {"status": "healthy"}
-
- @app.post("/api/translate")
- async def translate_endpoint(request: TranslationRequest):
-     try:
-         result = translate_text(
-             sentences=request.sentences,
-             target_lang=request.target_lang
-         )
-         return JSONResponse(content=result)
-     except Exception as e:
-         raise HTTPException(status_code=500, detail=str(e))
-
- # Streamlit interface
  def streamlit_app():
      st.title("Indic Language Translator")

@@ -149,7 +117,7 @@ def streamlit_app():
      st.markdown("""
  To use the translation API, send POST requests to:
  ```
- https://darshankr-trans-en-indic.hf.space/api/translate
+ https://YOUR-SPACE-NAME.hf.space/api/translate
  ```
  Request body format:
  ```json
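The JSON body advertised in the app's help text mirrors the fields of the `TranslationRequest` model removed above (`sentences` and `target_lang`). A minimal client call might look like this; the Space URL is the placeholder from the diff, and `hin_Deva` assumes IndicTrans2's FLORES-style language codes (Hindi in Devanagari script).

```python
# Minimal client sketch; URL and language code are illustrative.
import requests

response = requests.post(
    "https://YOUR-SPACE-NAME.hf.space/api/translate",
    json={
        "sentences": ["Hello, how are you?"],  # list of source sentences
        "target_lang": "hin_Deva",             # target language code
    },
    timeout=60,
)
response.raise_for_status()
print(response.json())
```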
@@ -163,7 +131,6 @@
      for lang, code in target_languages.items():
          st.markdown(f"- {lang}: `{code}`")

- # Create a unified application
  def create_app():
      routes = [
          Mount("/api", app),
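The rewritten app.py now does `from api import app` and mounts that application under `/api`, but `api.py` itself is not part of this commit. Based on the FastAPI app, CORS setup, routes, and `TranslationRequest` model removed above, it presumably looks something like the sketch below. The route paths are shortened to `/health` and `/translate` on the assumption that the mount prefix supplies `/api`, and the deferred import of `translate_text` is one way to avoid a circular import between app.py and api.py, not necessarily what the repository does.

```python
# api.py -- hypothetical reconstruction, not taken from this commit.
from typing import List

from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from pydantic import BaseModel

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

class TranslationRequest(BaseModel):
    sentences: List[str]
    target_lang: str

# app.py mounts this app at /api, so these resolve to /api/health and /api/translate.
@app.get("/health")
async def health_check():
    return {"status": "healthy"}

@app.post("/translate")
async def translate_endpoint(request: TranslationRequest):
    # Imported lazily so api.py does not pull in app.py at module load time.
    from app import translate_text
    try:
        result = translate_text(
            sentences=request.sentences,
            target_lang=request.target_lang,
        )
        return JSONResponse(content=result)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
```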
@@ -175,4 +142,5 @@ if __name__ == "__main__":
      if "streamlit" in sys.argv[0]:
          streamlit_app()
      else:
+         import uvicorn
          uvicorn.run(create_app(), host="0.0.0.0", port=7860)
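The tail of `create_app()` is cut off by the hunk boundary; only the `Mount("/api", app)` entry is visible. Given the surviving `StaticFiles` import, the composed Starlette application presumably looks roughly like the sketch below, where the static mount and its directory name are guesses rather than code from the commit.

```python
# Hypothetical completion of create_app(); only Mount("/api", app) appears in the diff.
from starlette.applications import Starlette
from starlette.routing import Mount
from starlette.staticfiles import StaticFiles
from api import app  # the FastAPI sub-application

def create_app():
    routes = [
        Mount("/api", app),                                       # REST API under /api
        Mount("/", StaticFiles(directory="static", html=True)),   # guessed static front end
    ]
    return Starlette(routes=routes)

# Launch modes implied by the __main__ block:
#   streamlit run app.py   -> sys.argv[0] points at the streamlit executable, so streamlit_app() runs
#   python app.py          -> uvicorn serves create_app() on 0.0.0.0:7860
```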
 