-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathstart
executable file
·81 lines (70 loc) · 2.53 KB
/
start
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
#!/bin/bash
# Starts the refinery-embedder dev container (optionally under a debugger).
#
# Usage: start [-d]
#   -d  run the app under debugpy and wait for a debugger client on DEBUG_PORT
DEBUG_MODE=false
DEBUG_PORT=15675
while getopts d flag
do
    case "${flag}" in
        d) DEBUG_MODE=true ;;
        # Reject unknown flags instead of silently ignoring them.
        *) echo "Usage: ${0##*/} [-d]" >&2; exit 2 ;;
    esac
done
# When debugging, override the image's default command with a debugpy-wrapped
# uvicorn. An empty CMD lets the Dockerfile's own CMD take over (see the
# intentionally-unquoted $CMD at the docker run call below).
CMD=""
if [ "$DEBUG_MODE" = true ]; then
    CMD="debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m uvicorn app:app --host 0.0.0.0 --port 80 --reload"
fi
# Always stop the dev container when this script exits (Ctrl-C included).
trap "echo -ne '\nstopping container...' && docker stop refinery-embedder > /dev/null 2>&1 && echo -ne '\t\t [done]\n'" EXIT
# Host IP on docker's default bridge network (last IPv4 in the IPAM config).
# NOTE(review): HOST_IP is not referenced anywhere later in this script —
# confirm it is still needed before removing.
HOST_IP=$(docker network inspect bridge --format='{{json .IPAM.Config}}' | grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' | tail -1)
echo -ne 'stopping old container...'
# The container may not be running; suppress the error in that case.
docker stop refinery-embedder > /dev/null 2>&1
echo -ne '\t [done]\n'
echo -ne 'building container...'
# Abort on a failed build instead of silently starting the stale old image.
docker build -t refinery-embedder-dev -f dev.Dockerfile . || {
    echo "docker build failed -> stopping" >&2
    exit 1
}
echo -ne '\t\t [done]\n'
# Locate the dev-setup model-data folder, which is mounted into the container
# as /models. It is expected one or two directory levels above this repo;
# try both candidates in order.
# alternative: use manual logic with
# -v /path/to/dev-setup/model-data:/models \
found=false
for MODEL_DIR in "${PWD%/*}/dev-setup/model-data/" "${PWD%/*/*}/dev-setup/model-data/"
do
    if [ -d "$MODEL_DIR" ]
    then
        found=true
        break
    fi
done
if [ "$found" = false ]
then
    echo "Can't find model data directory: $MODEL_DIR -> stopping"
    exit 1
fi
# Locate the dev-setup inference folder, mounted into the container as
# /inference. Like MODEL_DIR above, it may sit one or two levels above
# this repo; try both candidates.
INFERENCE_DIR=${PWD%/*}/dev-setup/inference/
if [ ! -d "$INFERENCE_DIR" ]
then
    INFERENCE_DIR=${PWD%/*/*}/dev-setup/inference/
    if [ ! -d "$INFERENCE_DIR" ]
    then
        # to include volume for local development, use the dev-setup inference folder:
        # alternative use manual logic with
        # -v /path/to/dev-setup/inference:/inference \
        # (fixed copy-paste: this message previously said "model data directory")
        echo "Can't find inference directory: $INFERENCE_DIR -> stopping"
        exit 1
    fi
fi
echo -ne 'starting...'
# NOTE(review): credentials below are hardcoded for the local dev-setup stack;
# do not reuse them outside local development.
# $CMD is intentionally unquoted: when empty it must disappear entirely so the
# image's default CMD runs; when set it must word-split into the debugpy
# command line built above.
docker run -d --rm \
    --name refinery-embedder \
    -p "$DEBUG_PORT:$DEBUG_PORT" \
    -p 7058:80 \
    -e S3_ENDPOINT_LOCAL=object-storage:9000 \
    -e S3_ACCESS_KEY=kern \
    -e S3_SECRET_KEY=r6ywtR33!DMlaL*SUUdy \
    -e POSTGRES=postgresql://postgres:kern@graphql-postgres:5432 \
    -e MODEL_PROVIDER=http://refinery-model-provider:80 \
    -e WS_NOTIFY_ENDPOINT="http://refinery-websocket:8080" \
    -e NEURAL_SEARCH=http://refinery-neural-search:80 \
    --mount type=bind,source="$(pwd)"/,target=/app \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$MODEL_DIR":/models \
    -v "$INFERENCE_DIR":/inference \
    --network dev-setup_default \
    refinery-embedder-dev $CMD > /dev/null 2>&1
echo -ne '\t\t\t [done]\n'
if [ "$DEBUG_MODE" = true ]; then
    echo -e "\033[0;33muse VSCode Debugger (Python Debugger: refinery-embedder) to start the service\033[0m"
fi
# Stream the container's logs until the user interrupts (triggers the EXIT trap).
docker logs -f refinery-embedder