-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdev.sh
More file actions
executable file
·110 lines (93 loc) · 2.86 KB
/
dev.sh
File metadata and controls
executable file
·110 lines (93 loc) · 2.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
#!/usr/bin/env bash
# dev.sh — bring up the full local development stack:
#   Neo4j (docker compose), the FastAPI backend (uv + uvicorn),
#   and the frontend (npm), then tear everything down on Ctrl+C.
#
# Usage: ./dev.sh [ollama|openai]   # embedding preset, default: ollama
set -euo pipefail
# Absolute path to the directory containing this script (repo root).
ROOT_DIR="$(cd "$(dirname "$0")" && pwd)"
# PIDs of the background servers; empty until each is launched so that
# cleanup can tell whether there is anything to stop.
BACKEND_PID=""
FRONTEND_PID=""
# True only when THIS run started the Neo4j container, so cleanup never
# stops a Neo4j instance that was already running beforehand.
NEO4J_STARTED_BY_US=false
#######################################
# Stop everything this script started, in reverse start order.
# Globals:  FRONTEND_PID, BACKEND_PID, NEO4J_STARTED_BY_US, ROOT_DIR (read)
# Outputs:  progress messages to stdout
#######################################
cleanup() {
  echo ""
  echo "Shutting down..."
  if [ -n "$FRONTEND_PID" ] && kill -0 "$FRONTEND_PID" 2>/dev/null; then
    # '|| true': the process may exit between the kill -0 probe and the
    # kill itself; don't let set -e abort the rest of the teardown.
    kill "$FRONTEND_PID" 2>/dev/null || true
    wait "$FRONTEND_PID" 2>/dev/null || true
    echo " Frontend stopped"
  fi
  if [ -n "$BACKEND_PID" ] && kill -0 "$BACKEND_PID" 2>/dev/null; then
    kill "$BACKEND_PID" 2>/dev/null || true
    wait "$BACKEND_PID" 2>/dev/null || true
    echo " Backend stopped"
  fi
  if [ "$NEO4J_STARTED_BY_US" = true ]; then
    # Only stop the container we own; best-effort, errors suppressed.
    docker compose -f "$ROOT_DIR/docker-compose.yml" stop neo4j >/dev/null 2>&1 || true
    echo " Neo4j stopped"
  fi
  echo "Done."
}
# Run cleanup exactly once. Trapping cleanup on EXIT *and* INT/TERM (as
# before) ran it twice on Ctrl+C: once for the signal, again for EXIT.
# Instead, signals just exit with the conventional 128+signo status and
# the single EXIT trap performs the teardown.
trap cleanup EXIT
trap 'exit 130' INT
trap 'exit 143' TERM
# --- Embedding preset ---
# First CLI argument selects how embeddings are produced (default: ollama).
EMBED_MODE="${1:-ollama}"
case "$EMBED_MODE" in
  ollama)
    # Native Ollama embedding endpoint on the default local port.
    export EMBEDDING_PROVIDER=ollama
    export EMBEDDING_MODEL=nomic-embed-text
    export EMBEDDING_BASE_URL=http://localhost:11434
    export EMBEDDING_DIMENSIONS=768
    ;;
  openai)
    # OpenAI-compatible API shape, still served by the local Ollama
    # instance; Ollama ignores the key, so any non-empty value works.
    export EMBEDDING_PROVIDER=openai
    export EMBEDDING_MODEL=nomic-embed-text
    export EMBEDDING_BASE_URL=http://localhost:11434
    export EMBEDDING_API_KEY=ollama
    export EMBEDDING_DIMENSIONS=768
    ;;
  *)
    # Fix: diagnostic now goes to stderr instead of stdout.
    echo "Unknown embedding mode: $EMBED_MODE (use 'ollama' or 'openai')" >&2
    exit 1
    ;;
esac
echo "Embedding: $EMBED_MODE ($EMBEDDING_MODEL)"
# --- AI preset (Ollama) ---
# Honor any caller-provided overrides; otherwise fall back to a local
# Ollama model. ':=' assigns the default in place, then one export
# publishes all three to child processes.
: "${AI_PROVIDER:=ollama}"
: "${AI_MODEL:=qwen3:8b}"
: "${AI_BASE_URL:=http://localhost:11434}"
export AI_PROVIDER AI_MODEL AI_BASE_URL
echo "AI: $AI_PROVIDER ($AI_MODEL)"
# --- Neo4j ---
# Start Neo4j only if it is not already running, and remember whether we
# started it so cleanup stops only the container this script owns.
if docker compose -f "$ROOT_DIR/docker-compose.yml" ps neo4j 2>/dev/null | grep -q "running"; then
  echo "Neo4j already running"
else
  echo "Starting Neo4j..."
  docker compose -f "$ROOT_DIR/docker-compose.yml" up -d neo4j
  NEO4J_STARTED_BY_US=true
  echo -n "Waiting for Neo4j to be healthy"
  # Fix: bound the wait — the loop previously spun forever if the
  # container never reached a healthy state. 60 polls x 2s = ~2 minutes.
  attempts=0
  until docker compose -f "$ROOT_DIR/docker-compose.yml" ps neo4j 2>/dev/null | grep -q "healthy"; do
    attempts=$((attempts + 1))
    if [ "$attempts" -ge 60 ]; then
      echo "" 
      echo "Timed out waiting for Neo4j to become healthy" >&2
      exit 1
    fi
    echo -n "."
    sleep 2
  done
  echo " ready"
fi
# --- Backend ---
echo "Starting backend..."
cd "$ROOT_DIR/backend"
uv run uvicorn ontoforge_server.main:app --reload --host 0.0.0.0 --port 8000 &
BACKEND_PID=$!
# Wait for the backend to answer on /docs. Fix: bail out if the server
# process dies during startup — the loop previously polled forever after
# a crashed boot (import error, port already in use, ...).
echo -n "Waiting for backend"
until curl -s -o /dev/null http://localhost:8000/docs 2>/dev/null; do
  if ! kill -0 "$BACKEND_PID" 2>/dev/null; then
    echo ""
    echo "Backend process exited during startup" >&2
    exit 1
  fi
  echo -n "."
  sleep 1
done
echo " ready"
# --- Frontend ---
echo "Starting frontend..."
cd "$ROOT_DIR/frontend"
npm run dev &
FRONTEND_PID=$!
# Quoted here-doc delimiter: the banner is emitted literally, exactly as
# the previous echo-per-line version printed it.
cat <<'BANNER'

All services running:
 Frontend http://localhost:5173
 Backend http://localhost:8000
 API docs http://localhost:8000/docs
 Neo4j http://localhost:7474

Press Ctrl+C to stop all services.
BANNER
# Block until every background job exits (normally interrupted by Ctrl+C,
# which triggers the cleanup trap).
wait