You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
# --- Build & runtime dependencies (Debian) ---------------------------------
# The image was migrated from Alpine to Debian; the old apk lines are kept as
# comments for provenance only:
#   apk add --virtual .dev-deps git clang clang-dev g++ make automake autoconf libtool pkgconfig cmake ninja
#   apk add --virtual .dev-testing-deps -X http://dl-3.alpinelinux.org/alpine/edge/testing autoconf-archive
#
# Use apt-get (not apt: unstable CLI in scripts), combine update+install in a
# single layer so the package index is never stale, skip recommended packages,
# and drop the index lists so they don't persist in the layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
        aspell-en \
        autoconf \
        autoconf-archive \
        automake \
        bash \
        clang \
        cmake \
        file \
        g++ \
        git \
        libpng-dev \
        libtool \
        make \
        openssl \
        tini \
        wget \
    && rm -rf /var/lib/apt/lists/*

# glibc merged <xlocale.h> into <locale.h>; some sources (written against
# musl/BSD) still include <xlocale.h>, so provide a compatibility symlink.
RUN ln -s /usr/include/locale.h /usr/include/xlocale.h
# --- OCR toolchain ---------------------------------------------------------
# tesseract-ocr: OCR engine binary; poppler-utils: pdftotext/pdftoppm for PDF
# extraction. (Replaces the former Alpine `apk add tesseract-ocr` /
# `apk add poppler-utils` lines.)
# update+install+clean in one layer: a standalone `apt install` would reuse a
# stale (or missing) package index, and cleanup in a later layer would not
# shrink the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        poppler-utils \
        tesseract-ocr \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# --- Tesseract language data + sources -------------------------------------
# Install the English "fast" traineddata from the tessdata_fast main branch.
# mkdir -p + wget in one layer (the original fetched the same file twice in
# two separate RUNs).
RUN mkdir -p /usr/local/share/tessdata \
    && wget https://github.com/tesseract-ocr/tessdata_fast/raw/main/eng.traineddata \
            -P /usr/local/share/tessdata

# The original used `RUN mkdir src` followed by `RUN cd src`, but each RUN
# starts a fresh shell, so the `cd` had no effect and the clone landed in the
# current WORKDIR instead. WORKDIR both creates the directory and persists
# across instructions.
WORKDIR /src
RUN git clone --depth 1 https://github.com/tesseract-ocr/tesseract.git
# --- Ollama ----------------------------------------------------------------
# Equivalent to the upstream one-liner:
#   curl -fsSL https://ollama.com/install.sh | sh
# SECURITY NOTE(review): this executes an unauthenticated script fetched over
# the network at build time. Pin a specific release or verify a checksum if
# reproducible/auditable builds matter.
# Download, run, and remove the installer in a single layer (the debug
# `ls`/`which` steps from the original added layers without adding value, and
# the installer script was left behind in the image).
RUN wget https://ollama.com/install.sh -O /usr/local/bin/ollama-install \
    && chmod +x /usr/local/bin/ollama-install \
    && sh /usr/local/bin/ollama-install \
    && rm /usr/local/bin/ollama-install

# Pre-pull the llama3 model so the container starts with it cached.
# The original used `ollama serve & sleep 2 && ollama pull llama3`, which is
# racy: poll until the server actually answers before pulling (30 s budget).
RUN ollama serve & \
    i=0; until ollama list >/dev/null 2>&1; do \
        i=$((i+1)); [ "$i" -ge 30 ] && exit 1; sleep 1; \
    done \
    && ollama pull llama3

#RUN cd tesseract && ./autogen.sh && ./configure --build=x86_64-alpine-linux-musl --host=x86_64-alpine-linux-musl && make && make install && cd /tmp/src
description: "Runs a local LLM based on ollama with any of their models from https://github.com/ollama/ollama?tab=readme-ov-file#model-library"
16
+
parameters:
17
+
- name: question
18
+
description: "The input question to the model"
19
+
required: true
20
+
multiline: true
21
+
example: ""
22
+
schema:
23
+
type: string
24
+
25
+
- name: shuffle_cloud_inference
15
26
description: Input ANY kind of data in the format you want, and the format you want it in. Default is a business-y email. Uses ShuffleGPT, which is based on OpenAI and our own model.
0 commit comments