matt HOFFNER committed
Commit cf8ccfd · 1 Parent(s): f373356

fix dockerfile

Files changed (2)
  1. Dockerfile +53 -1
  2. src/components/ChatWindow.jsx +3 -3
Dockerfile CHANGED
@@ -1,4 +1,56 @@
- FROM node:18-alpine AS base
+ # Base mirror image
+ FROM alpine:edge as base
+
+ # Maintainer information
+ LABEL maintainer "a76yyyy <[email protected]>"
+ LABEL org.opencontainers.image.source=https://github.com/qiandao-today/ddddocr-docker
+
+ # Environment for onnxruntime
+ ENV ONNXRUNTIME_TAG=v1.13.1
+
+ # Replace the alpine image source & install packages
+ RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories && \
+     echo 'http://mirrors.ustc.edu.cn/alpine/v3.16/main' >> /etc/apk/repositories && \
+     echo 'http://mirrors.ustc.edu.cn/alpine/v3.16/community' >> /etc/apk/repositories && \
+     apk update && \
+     apk add --update --no-cache bash git tzdata nano openssh-client ca-certificates file python3 py3-pip py3-setuptools py3-wheel && \
+     # ln -s /usr/bin/python3 /usr/bin/python && \
+     [[ $(getconf LONG_BIT) = "32" ]] && \
+     { bashtmp='' && cxxtmp=''; } || { \
+     [[ -z $(file /bin/busybox | grep -i "arm") ]] && \
+     { bashtmp='/onnxruntime/build.sh' && cxxtmp=''; } || \
+     { bashtmp='setarch arm64 /onnxruntime/build.sh' && cxxtmp='-Wno-psabi'; }; } && \
+     echo $bashtmp && echo $cxxtmp && { \
+     [[ -n "$bashtmp" ]] && { \
+     apk add --update --no-cache py3-numpy-dev py3-opencv py3-pillow && { \
+     apk add --update --no-cache --virtual .build_deps cmake make perl autoconf g++=11.2.1_git20220219-r2 libexecinfo-dev=1.1-r1 automake linux-headers libtool util-linux openblas-dev python3-dev protobuf-dev date-dev gtest-dev eigen-dev py3-pybind11-dev flatbuffers-dev=2.0.0-r1 patch boost-dev nlohmann-json || \
+     apk add --update --no-cache --virtual .build_deps cmake make perl autoconf g++=11.2.1_git20220219-r2 libexecinfo-dev=1.1-r1 automake linux-headers libtool util-linux openblas-dev python3-dev protobuf-dev date-dev gtest-dev eigen-dev py3-pybind11-dev patch boost-dev nlohmann-json ;} && \
+     git clone --depth 1 --branch $ONNXRUNTIME_TAG https://github.com/Microsoft/onnxruntime && \
+     cd /onnxruntime && \
+     git submodule update --init --recursive && \
+     cd .. && \
+     $bashtmp --config MinSizeRel \
+         --parallel \
+         --build_wheel \
+         --enable_pybind \
+         --cmake_extra_defines \
+             CMAKE_CXX_FLAGS="-Wno-deprecated-copy -Wno-unused-variable -Wno-unused-parameter $cxxtmp" \
+             onnxruntime_BUILD_UNIT_TESTS=OFF \
+             onnxruntime_BUILD_SHARED_LIB=OFF \
+             onnxruntime_USE_PREINSTALLED_EIGEN=ON \
+             onnxruntime_PREFER_SYSTEM_LIB=ON \
+             eigen_SOURCE_PATH=/usr/include/eigen3 \
+         --skip_tests && \
+     apk add --update --no-cache libprotobuf-lite && \
+     pip install --no-cache-dir /onnxruntime/build/Linux/MinSizeRel/dist/onnxruntime*.whl && \
+     ln -s $(python -c 'import warnings;warnings.filterwarnings("ignore");\
+     from distutils.sysconfig import get_python_lib;print(get_python_lib())')/onnxruntime/capi/libonnxruntime_providers_shared.so /usr/lib && \
+     cd / && rm -rf /onnxruntime && \
+     apk del .build_deps ;} || { \
+     apk add --update --no-cache libprotobuf-lite && \
+     echo "Onnxruntime Builder does not currently support building i386 and arm32 wheels";} ;} && \
+     rm -rf /var/cache/apk/* && \
+     rm -rf /usr/share/man/*
 
  # Install dependencies only when needed
  FROM base AS deps
src/components/ChatWindow.jsx CHANGED
@@ -12,7 +12,7 @@ function ChatWindow({
   stopStrings,
   maxTokens,
 }) {
-  const { loadingStatus, send, isGenerating, setOnMessage } = useLLM();
+  const { loadingStatus, send, isGenerating } = useLLM();
   const [fileText, setFileText] = useState();
   const [userInput, setUserInput] = useState("");
 
@@ -31,7 +31,6 @@ function ChatWindow({
   console.log('found file text splitting into chunks')
   const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
   const docs = await textSplitter.createDocuments([fileText]);
-  let queryResult;
   let qaPrompt;
   console.log(docs);
   try {
@@ -40,7 +39,8 @@ function ChatWindow({
     [...docs.map((v, k) => k)],
     new XenovaTransformersEmbeddings()
   )
-  let queryResult = await vectorStore.similaritySearch(userInput, 1);
+  console.log(vectorStore);
+  const queryResult = await vectorStore.similaritySearch(userInput, 1);
   console.log("queryResult", queryResult);
   qaPrompt =
     `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided.
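
For context, the new hunk in this file embeds the chunked file text into an in-memory vector store and retrieves the single most similar chunk before constructing `qaPrompt`. Below is a minimal, self-contained sketch of that retrieval flow, not the component's exact code: the LangChain import paths reflect the 2023-era `langchain` JS package, and the `"./XenovaTransformersEmbeddings"` path is a hypothetical stand-in for this project's local embeddings wrapper, so adjust both to match the repo.

```js
// Sketch of the retrieval flow added in this commit (assumptions noted above).
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
import { MemoryVectorStore } from "langchain/vectorstores/memory";
// Hypothetical path: the project's transformers.js-backed Embeddings class.
import { XenovaTransformersEmbeddings } from "./XenovaTransformersEmbeddings";

async function buildQaPrompt(fileText, userInput) {
  // Split the uploaded file into ~1000-character chunks.
  const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
  const docs = await textSplitter.createDocuments([fileText]);

  // Embed each chunk and keep the vectors in memory.
  const vectorStore = await MemoryVectorStore.fromTexts(
    docs.map((doc) => doc.pageContent),
    docs.map((_, i) => ({ chunk: i })),
    new XenovaTransformersEmbeddings()
  );

  // Retrieve the single chunk most similar to the user's question.
  const queryResult = await vectorStore.similaritySearch(userInput, 1);
  const context = queryResult.map((doc) => doc.pageContent).join("\n");

  // Fold the retrieved context into the QA prompt, as ChatWindow does before send().
  return `You are an AI assistant providing helpful advice. You are given the following extracted parts of a long document and a question. Provide a conversational answer based on the context provided.

Context:
${context}

Question: ${userInput}`;
}
```

`similaritySearch(query, k)` returns LangChain `Document` objects, so `pageContent` carries the matched chunk text that gets folded into the prompt.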