wannaphong committed
Commit 414dfdc · 1 Parent(s): b090190

Update docs

Files changed (2)
  1. app.py +8 -0
  2. routers/tokenize.py +3 -3
app.py CHANGED
@@ -2,6 +2,7 @@ from fastapi import Depends, FastAPI, Header, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import RedirectResponse
 from routers import tokenize
+import pythainlp
 
 
 DESC_TEXT = "PyThaiNLP API"
@@ -26,4 +27,11 @@ def index():
     response = RedirectResponse(url='/docs')
     return response
 
+@app.get("/version")
+def version():
+    """
+    Get PyThaiNLP Version
+    """
+    return {"version": pythainlp.__version__}
+
 app.include_router(tokenize.router, prefix="/tokenize", tags=["Tokenize"])
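
The new /version endpoint can be exercised with any HTTP client once the app is running. A minimal sketch, assuming the API is served locally at http://127.0.0.1:8000 (the base URL is a placeholder, not part of this commit):

import requests  # any HTTP client works; requests is assumed to be installed

# Hypothetical local deployment; adjust the base URL to your instance.
resp = requests.get("http://127.0.0.1:8000/version")
print(resp.json())  # e.g. {"version": "..."} reporting pythainlp.__version__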
routers/tokenize.py CHANGED
@@ -16,9 +16,6 @@ class SentTokenizeEngine(str, Enum):
 class WordTokenizeEngine(str, Enum):
     newmm = "newmm"
     longest = "longest"
-    deepcut = "deepcut"
-    icu = "icu"
-    ulmfit = "ulmfit"
 
 
 class SubwordTokenizeEngine(str, Enum):
@@ -33,6 +30,9 @@ class SubwordTokenizeResponse(BaseModel):
 
 @router.get('/word_tokenize', response_model=WordTokenizeResponse)
 def word_tokenize(text: str, engine: WordTokenizeEngine = "newmm"):
+    """
+    Word tokenize
+    """
     return {"words": py_word_tokenize(text=text, engine=engine)}
 
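
With deepcut, icu, and ulmfit removed from WordTokenizeEngine, only newmm and longest remain valid values for the engine query parameter. A minimal sketch of calling the documented endpoint, assuming the same local base URL as above (the sample Thai text is illustrative):

import requests  # assumed HTTP client, as above

# The router is mounted under the /tokenize prefix (see app.py).
params = {"text": "สวัสดีประเทศไทย", "engine": "newmm"}  # "longest" is the only other engine
resp = requests.get("http://127.0.0.1:8000/tokenize/word_tokenize", params=params)
print(resp.json())  # {"words": [...]} per WordTokenizeResponse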