Add `chat-ui` and `chat-ui-db` docker images (#613)
* Add a `Dockerfile.local` image to build * Add a build & publish workflow * fix workflow * Add support for local mongoDB * Fix problem with POST requests * use slim image * Set mistral to be the default model in .env * Fix entrypoint * Revert change regarding config folder * replace env variable by build arg for DB and update work * old mention of `conf` folder in readme * env to .env * Revert readme changes * lint
parent
e0c0b0e53f
commit
73b316cac5
|
@ -6,3 +6,5 @@ LICENSE
|
|||
README.md
|
||||
node_modules/
|
||||
.svelte-kit/
|
||||
.env*
|
||||
!.env
|
60
.env
60
.env
|
@ -44,39 +44,39 @@ REJECT_UNAUTHORIZED=true
|
|||
|
||||
# 'name', 'userMessageToken', 'assistantMessageToken' are required
|
||||
MODELS=`[
|
||||
{
|
||||
"name": "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
|
||||
"datasetName": "OpenAssistant/oasst1",
|
||||
"description": "A good alternative to ChatGPT",
|
||||
"websiteUrl": "https://open-assistant.io",
|
||||
"userMessageToken": "<|prompter|>",
|
||||
"assistantMessageToken": "<|assistant|>",
|
||||
"messageEndToken": "</s>",
|
||||
"preprompt": "Below are a series of dialogues between various people and an AI assistant. The AI tries to be helpful, polite, honest, sophisticated, emotionally aware, and humble-but-knowledgeable. The assistant is happy to help with almost anything, and will do its best to understand exactly what is needed. It also tries to avoid giving false or misleading information, and it caveats when it isn't entirely sure about the right answer. That said, the assistant is practical and really does its best, and doesn't let caution get too much in the way of being useful.\n-----\n",
|
||||
"promptExamples": [
|
||||
{
|
||||
"title": "Write an email from bullet list",
|
||||
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
|
||||
}, {
|
||||
"title": "Code a snake game",
|
||||
"prompt": "Code a basic snake game in python, give explanations for each step."
|
||||
}, {
|
||||
"title": "Assist in a task",
|
||||
"prompt": "How do I make a delicious lemon cheesecake?"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"temperature": 0.9,
|
||||
"top_p": 0.95,
|
||||
"repetition_penalty": 1.2,
|
||||
"top_k": 50,
|
||||
"truncate": 1000,
|
||||
"max_new_tokens": 1024
|
||||
{
|
||||
"name": "mistralai/Mistral-7B-Instruct-v0.1",
|
||||
"displayName": "mistralai/Mistral-7B-Instruct-v0.1",
|
||||
"description": "Mistral 7B is a new Apache 2.0 model, released by Mistral AI that outperforms Llama2 13B in benchmarks.",
|
||||
"websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
|
||||
"preprompt": "",
|
||||
"chatPromptTemplate" : "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
|
||||
"parameters": {
|
||||
"temperature": 0.1,
|
||||
"top_p": 0.95,
|
||||
"repetition_penalty": 1.2,
|
||||
"top_k": 50,
|
||||
"truncate": 3072,
|
||||
"max_new_tokens": 1024,
|
||||
"stop": ["</s>"]
|
||||
},
|
||||
"promptExamples": [
|
||||
{
|
||||
"title": "Write an email from bullet list",
|
||||
"prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
|
||||
}, {
|
||||
"title": "Code a snake game",
|
||||
"prompt": "Code a basic snake game in python, give explanations for each step."
|
||||
}, {
|
||||
"title": "Assist in a task",
|
||||
"prompt": "How do I make a delicious lemon cheesecake?"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]`
|
||||
|
||||
OLD_MODELS=`[]`# any removed models, `{ name: string, displayName?: string, id?: string }`
|
||||
TASK_MODEL='' # name of the model used for tasks such as summarizing title, creating query, etc.
|
||||
TASK_MODEL= # name of the model used for tasks such as summarizing title, creating query, etc.
|
||||
|
||||
PUBLIC_ORIGIN=#https://huggingface.co
|
||||
PUBLIC_SHARE_PREFIX=#https://hf.co/chat
|
||||
|
|
|
@ -0,0 +1,103 @@
|
|||
# Builds and publishes the two chat-ui container images to GHCR:
#   chat-ui-db  — bundles a local MongoDB (INCLUDE_DB=true)
#   chat-ui     — expects an external MongoDB (INCLUDE_DB=false)
# Images are pushed on main/tags; PRs only build (push is skipped).
name: Build and Publish Image

on:
  push:
    branches:
      - "main"
  pull_request:
    branches:
      - "*"
    paths:
      - "Dockerfile.local"
      - "entrypoint.sh"
  workflow_dispatch:
  release:
    types: [published, edited]

jobs:
  build-and-publish-image-with-db:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/huggingface/chat-ui-db
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}
            type=semver,pattern={{major}}.{{minor}}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and Publish Docker Image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.local
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          # build-args is a newline-delimited NAME=value list, NOT a YAML
          # sequence — a leading "- " would become part of the arg name and
          # INCLUDE_DB would never be set in the Dockerfile.
          build-args: |
            INCLUDE_DB=true
  build-and-publish-image-nodb:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ghcr.io/huggingface/chat-ui
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}
            type=semver,pattern={{major}}.{{minor}}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to GitHub Container Registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and Publish Docker Image
        uses: docker/build-push-action@v5
        with:
          context: .
          file: Dockerfile.local
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
          # newline-delimited NAME=value (see note in the -with-db job)
          build-args: |
            INCLUDE_DB=false
|
|
@ -53,4 +53,4 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Build Docker image
|
||||
run: docker build --secret id=DOTENV_LOCAL,src=conf/.env.ci -t chat-ui:latest .
|
||||
run: docker build --secret id=DOTENV_LOCAL,src=.env.ci -t chat-ui:latest .
|
||||
|
|
|
@ -5,10 +5,10 @@ node_modules
|
|||
/package
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
!.env.template
|
||||
vite.config.js.timestamp-*
|
||||
vite.config.ts.timestamp-*
|
||||
SECRET_CONFIG
|
||||
.idea
|
||||
!conf/.env.ci
|
||||
!.env.ci
|
||||
!.env
|
||||
!.env.template
|
|
@ -0,0 +1,28 @@
|
|||
# syntax=docker/dockerfile:1
# Builds chat-ui as a node:20-slim image. INCLUDE_DB=true selects a stage
# that bundles the MongoDB server binaries (copied from the official image)
# so the container can run a local database; INCLUDE_DB=false produces a
# plain app image that expects MONGODB_URL to point at an external DB.
ARG INCLUDE_DB=false

# Source of the mongod/mongo* binaries only — nothing else is taken from it.
# NOTE(review): mongo:latest is unpinned; consider pinning a major tag
# (e.g. mongo:7) for reproducible builds — confirm desired version.
FROM mongo:latest as mongo

# Stage used when no local DB is wanted.
FROM node:20-slim as local_db_false

# Stage used when a local DB is wanted: install mongod's runtime deps and
# copy in the server binaries.
FROM node:20-slim as local_db_true

# Combine update+install in one layer (avoids stale apt cache), skip
# recommends, and drop the apt lists so they don't bloat the image.
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl gnupg && \
    rm -rf /var/lib/apt/lists/*

COPY --from=mongo /usr/bin/mongo* /usr/bin/

# Pick the stage matching the build arg; re-declare the ARG inside the stage
# and export it so entrypoint.sh can decide whether to start mongod.
FROM local_db_${INCLUDE_DB} as final
ARG INCLUDE_DB=false
ENV INCLUDE_DB=${INCLUDE_DB}

WORKDIR /app

# Copy only the lockfiles first so the npm ci layer is cached until deps change.
COPY --link --chown=1000 package-lock.json package.json ./
RUN --mount=type=cache,target=/app/.npm \
    npm set cache /app/.npm && \
    npm ci

# copy the rest of the files, run regardless of
COPY --chown=1000 --link . .
RUN chmod +x /app/entrypoint.sh

CMD ["/bin/bash", "-c", "/app/entrypoint.sh"]
|
|
@ -24,7 +24,7 @@ A chat interface using open source models, eg OpenAssistant or Llama. It is a Sv
|
|||
5. [Deploying to a HF Space](#deploying-to-a-hf-space)
|
||||
6. [Building](#building)
|
||||
|
||||
## No Setup Deploy
|
||||
## No Setup Deploy
|
||||
|
||||
If you don't want to configure, setup, and launch your own Chat UI yourself, you can use this option as a fast deploy alternative.
|
||||
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
# Container entrypoint for chat-ui.
# 1. Materializes /app/.env.local from the DOTENV_LOCAL env var if provided
#    (otherwise relies on a bind-mounted .env.local or the baked-in .env).
# 2. If the image was built with INCLUDE_DB=true, appends a localhost
#    MONGODB_URL and starts the bundled mongod in the background.
# 3. Builds the app and serves it on 0.0.0.0:3000.

if test -z "${DOTENV_LOCAL}" ; then
	if ! test -f "/app/.env.local" ; then
		echo "DOTENV_LOCAL was not found in the ENV variables and .env.local is not set using a bind volume. We are using the default .env config."
	fi;
else
	echo "DOTENV_LOCAL was found in the ENV variables. Creating .env.local file."
	cat <<< "$DOTENV_LOCAL" > /app/.env.local
fi;

if [ "$INCLUDE_DB" = "true" ] ; then
	echo "INCLUDE_DB is set to true. Appending MONGODB_URL"

	# touch first so the append works even when no .env.local was created above
	touch /app/.env.local
	echo -e "\nMONGODB_URL=mongodb://localhost:27017" >> /app/.env.local

	mkdir -p /data/db
	# announce before launching so the log line precedes mongod's own output
	echo "Starting local MongoDB instance"
	mongod &
fi;

npm run build
# exec so the preview server replaces this shell as PID 1 and receives
# SIGTERM from `docker stop` directly
exec npm run preview -- --host 0.0.0.0 --port 3000
|
Loading…
Reference in New Issue