Skip to content

Commit 45a1081

Browse files
Replaced Ollama urls
1 parent cb3b859 commit 45a1081

36 files changed

Lines changed: 140 additions & 142 deletions

.env.example

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -5,8 +5,8 @@
55
# AI Configuration (Ollama - default)
66
# =============================================================================
77

8-
# Ollama server URL (default: Carl AI VM on Azure)
9-
OLLAMA_HOST=http://20.98.70.48:11434
8+
# Ollama server URL (Bay Tides AI)
9+
OLLAMA_HOST=https://ollama.baytides.org
1010

1111
# Ollama model to use (default: llama3.2)
1212
OLLAMA_MODEL=llama3.2
Lines changed: 5 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -2,17 +2,17 @@
22
set -euo pipefail
33

44
# =============================================================================
5-
# Carl AI Batch Content Generation
5+
# Bay Tides AI Batch Content Generation
66
# =============================================================================
77
# Generates all missing content in batches with progress tracking.
88
# Designed for long-running operation with recovery support.
99
#
1010
# Usage:
11-
# ./carl_batch_generation.sh [--analyses-only|--recommendations-only]
11+
# ./batch_generation.sh [--analyses-only|--recommendations-only]
1212
#
1313
# Environment:
1414
# DATABASE_URL - Database path (default: civitas.db)
15-
# OLLAMA_HOST - Ollama server (default: http://localhost:11434)
15+
# OLLAMA_HOST - Ollama server (default: https://ollama.baytides.org)
1616
# OLLAMA_MODEL - Model to use (default: llama3.1:8b-instruct-q8_0)
1717
# BATCH_SIZE - Items per batch (default: 25)
1818
# SLEEP_BETWEEN - Seconds between batches (default: 5)
@@ -22,7 +22,7 @@ set -euo pipefail
2222
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
2323
CIVITAS_ROOT="${SCRIPT_DIR}/.."
2424
PYTHON_BIN="${PYTHON_BIN:-/opt/civitas/.venv/bin/python}"
25-
LOG_FILE="${LOG_FILE:-/var/log/civitas/carl_batch.log}"
25+
LOG_FILE="${LOG_FILE:-/var/log/civitas/batch.log}"
2626

2727
# Defaults
2828
BATCH_SIZE="${BATCH_SIZE:-25}"
@@ -73,7 +73,7 @@ print(f'{total},{analyzed},{with_recs}')
7373
}
7474

7575
log "=============================================="
76-
log "Carl AI Batch Content Generation"
76+
log "Bay Tides AI Batch Content Generation"
7777
log "=============================================="
7878
log "Model: ${OLLAMA_MODEL}"
7979
log "Batch size: ${BATCH_SIZE}"
Lines changed: 25 additions & 25 deletions
Original file line number · Diff line number · Diff line change
@@ -1,10 +1,10 @@
11
#!/bin/bash
2-
# Civitas Deployment Script for Carl AI Server (20.98.70.48)
2+
# Civitas Deployment Script for Bay Tides Server
33
# This script deploys the FastAPI backend and populates the database
44
set -e
55

6-
CARL_HOST="20.98.70.48"
7-
CARL_USER="azureuser"
6+
DEPLOY_HOST="${DEPLOY_HOST:?Set DEPLOY_HOST to your server IP/hostname}"
7+
DEPLOY_USER="${DEPLOY_USER:-azureuser}"
88
DEPLOY_DIR="/opt/civitas"
99
CONGRESS_API_KEY="${CONGRESS_API_KEY:-}"
1010

@@ -14,35 +14,35 @@ if [[ -z "${CONGRESS_API_KEY}" ]]; then
1414
fi
1515

1616
echo "=========================================="
17-
echo "Civitas Deployment to Carl AI Server"
17+
echo "Civitas Deployment to Bay Tides Server"
1818
echo "=========================================="
1919

2020
# Check SSH access
21-
echo "[1/8] Checking SSH access to Carl..."
22-
if ! ssh -o BatchMode=yes -o ConnectTimeout=5 ${CARL_USER}@${CARL_HOST} "echo 'SSH OK'" 2>/dev/null; then
23-
echo "ERROR: Cannot SSH to ${CARL_USER}@${CARL_HOST}"
21+
echo "[1/8] Checking SSH access to server..."
22+
if ! ssh -o BatchMode=yes -o ConnectTimeout=5 ${DEPLOY_USER}@${DEPLOY_HOST} "echo 'SSH OK'" 2>/dev/null; then
23+
echo "ERROR: Cannot SSH to ${DEPLOY_USER}@${DEPLOY_HOST}"
2424
echo "Make sure you have SSH key access configured"
2525
exit 1
2626
fi
2727

28-
# Create deployment directory on Carl
29-
echo "[2/8] Creating deployment directory on Carl..."
30-
ssh ${CARL_USER}@${CARL_HOST} << 'REMOTE_SETUP'
28+
# Create deployment directory on server
29+
echo "[2/8] Creating deployment directory on server..."
30+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << 'REMOTE_SETUP'
3131
sudo mkdir -p /opt/civitas
3232
sudo chown $USER:$USER /opt/civitas
3333
mkdir -p /opt/civitas/{data,logs}
3434
REMOTE_SETUP
3535

36-
# Sync codebase to Carl
37-
echo "[3/8] Syncing codebase to Carl..."
36+
# Sync codebase to server
37+
echo "[3/8] Syncing codebase to server..."
3838
rsync -avz --exclude '.git' --exclude '.venv' --exclude 'venv' --exclude '__pycache__' \
3939
--exclude 'node_modules' --exclude '.next' --exclude 'out' --exclude '*.db' \
4040
--exclude 'web' --exclude '.pytest_cache' --exclude '.ruff_cache' \
41-
/Users/steven/Github/civitas/ ${CARL_USER}@${CARL_HOST}:${DEPLOY_DIR}/
41+
/Users/steven/Github/civitas/ ${DEPLOY_USER}@${DEPLOY_HOST}:${DEPLOY_DIR}/
4242

43-
# Install dependencies and set up on Carl
44-
echo "[4/8] Setting up Python environment on Carl..."
45-
ssh ${CARL_USER}@${CARL_HOST} << REMOTE_INSTALL
43+
# Install dependencies and set up on server
44+
echo "[4/8] Setting up Python environment on server..."
45+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << REMOTE_INSTALL
4646
set -e
4747
cd ${DEPLOY_DIR}
4848
@@ -61,7 +61,7 @@ pip install -e ".[all]"
6161
# Create .env file
6262
cat > .env << 'ENV'
6363
CONGRESS_API_KEY=${CONGRESS_API_KEY}
64-
OLLAMA_HOST=http://localhost:11434
64+
OLLAMA_HOST=https://ollama.baytides.org
6565
OLLAMA_MODEL=llama3.2
6666
CIVITAS_AI_PROVIDER=ollama
6767
DATABASE_URL=sqlite:///civitas.db
@@ -72,7 +72,7 @@ REMOTE_INSTALL
7272

7373
# Initialize database and ingest data
7474
echo "[5/8] Initializing database and ingesting data..."
75-
ssh ${CARL_USER}@${CARL_HOST} << 'REMOTE_INGEST'
75+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << 'REMOTE_INGEST'
7676
set -e
7777
cd /opt/civitas
7878
source .venv/bin/activate
@@ -110,7 +110,7 @@ REMOTE_INGEST
110110

111111
# Set up systemd service
112112
echo "[6/8] Setting up systemd service..."
113-
ssh ${CARL_USER}@${CARL_HOST} << 'REMOTE_SERVICE'
113+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << 'REMOTE_SERVICE'
114114
sudo tee /etc/systemd/system/civitas-api.service > /dev/null << 'SERVICE'
115115
[Unit]
116116
Description=Civitas FastAPI Backend
@@ -137,7 +137,7 @@ REMOTE_SERVICE
137137

138138
# Set up Nginx reverse proxy
139139
echo "[7/8] Configuring Nginx..."
140-
ssh ${CARL_USER}@${CARL_HOST} << 'REMOTE_NGINX'
140+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << 'REMOTE_NGINX'
141141
sudo tee /etc/nginx/sites-available/civitas > /dev/null << 'NGINX'
142142
server {
143143
listen 80;
@@ -169,7 +169,7 @@ REMOTE_NGINX
169169

170170
# Verify deployment
171171
echo "[8/8] Verifying deployment..."
172-
ssh ${CARL_USER}@${CARL_HOST} << 'REMOTE_VERIFY'
172+
ssh ${DEPLOY_USER}@${DEPLOY_HOST} << 'REMOTE_VERIFY'
173173
echo "Checking services..."
174174
sudo systemctl status civitas-api --no-pager | head -10
175175
@@ -189,8 +189,8 @@ echo "DEPLOYMENT COMPLETE!"
189189
echo "=========================================="
190190
echo ""
191191
echo "API Endpoints:"
192-
echo " Health: http://${CARL_HOST}:8000/api/health"
193-
echo " Docs: http://${CARL_HOST}:8000/api/docs"
194-
echo " API: http://${CARL_HOST}:8000/api/v1/"
192+
echo " Health: http://${DEPLOY_HOST}:8000/api/health"
193+
echo " Docs: http://${DEPLOY_HOST}:8000/api/docs"
194+
echo " API: http://${DEPLOY_HOST}:8000/api/v1/"
195195
echo ""
196-
echo "Next: Update frontend to use API at http://${CARL_HOST}:8000"
196+
echo "Next: Update frontend to use API at http://${DEPLOY_HOST}:8000"

scripts/download_internet_archive.py

Lines changed: 17 additions & 17 deletions
Original file line number · Diff line number · Diff line change
@@ -8,7 +8,7 @@
88
- Federal Register volumes
99
1010
All documents are uploaded to Azure Blob Storage and can optionally be
11-
sent to Carl (Ollama server) for AI analysis.
11+
sent to Ollama (via Bay Tides) for AI analysis.
1212
1313
Usage:
1414
# Download everything
@@ -17,8 +17,8 @@
1717
# Download only US Reports
1818
python scripts/download_internet_archive.py --us-reports-only
1919
20-
# Download and immediately process with Carl
21-
python scripts/download_internet_archive.py --process-with-carl
20+
# Download and immediately process with AI
21+
python scripts/download_internet_archive.py --process-with-ai
2222
2323
# Dry run (list what would be downloaded)
2424
python scripts/download_internet_archive.py --dry-run
@@ -76,9 +76,9 @@ def main():
7676
help="Limit number of Federal Register volumes (default: 50)",
7777
)
7878
parser.add_argument(
79-
"--process-with-carl",
79+
"--process-with-ai",
8080
action="store_true",
81-
help="Send downloaded documents to Carl for AI analysis",
81+
help="Send downloaded documents to Bay Tides AI for analysis",
8282
)
8383
parser.add_argument(
8484
"--dry-run",
@@ -249,16 +249,16 @@ def main():
249249

250250
console.print(table)
251251

252-
# Process with Carl if requested
253-
if args.process_with_carl and downloaded_docs:
254-
console.print("\n[bold blue]═══ Processing with Carl ═══[/bold blue]")
255-
process_with_carl(downloaded_docs)
252+
# Process with AI if requested
253+
if args.process_with_ai and downloaded_docs:
254+
console.print("\n[bold blue]═══ Processing with Bay Tides AI ═══[/bold blue]")
255+
process_with_ai(downloaded_docs)
256256

257257
client.close()
258258

259259

260-
def process_with_carl(documents):
261-
"""Send documents to Carl (Ollama) for AI analysis.
260+
def process_with_ai(documents):
261+
"""Send documents to Ollama (via Bay Tides) for AI analysis.
262262
263263
This extracts key information like:
264264
- Case citations and holdings (for US Reports)
@@ -267,10 +267,10 @@ def process_with_carl(documents):
267267
"""
268268
import os
269269

270-
ollama_host = os.getenv("OLLAMA_HOST", "http://20.98.70.48:11434")
270+
ollama_host = os.getenv("OLLAMA_HOST", "https://ollama.baytides.org")
271271
ollama_model = os.getenv("OLLAMA_MODEL", "llama3.2")
272272

273-
console.print(f"Connecting to Carl at {ollama_host}...")
273+
console.print(f"Connecting to Bay Tides AI at {ollama_host}...")
274274
console.print(f"Using model: {ollama_model}")
275275

276276
try:
@@ -279,12 +279,12 @@ def process_with_carl(documents):
279279
# Test connection
280280
response = httpx.get(f"{ollama_host}/api/tags", timeout=10)
281281
if response.status_code != 200:
282-
console.print("[red]Could not connect to Carl[/red]")
282+
console.print("[red]Could not connect to Bay Tides AI[/red]")
283283
return
284-
console.print("[green]Connected to Carl[/green]")
284+
console.print("[green]Connected to Bay Tides AI[/green]")
285285

286286
except Exception as e:
287-
console.print(f"[red]Error connecting to Carl: {e}[/red]")
287+
console.print(f"[red]Error connecting to Bay Tides AI: {e}[/red]")
288288
return
289289

290290
# Process documents based on type
@@ -355,7 +355,7 @@ def process_with_carl(documents):
355355
prompt = f"""Analyze this legal document and provide a summary:
356356
{text_sample}"""
357357

358-
# Send to Carl
358+
# Send to Bay Tides AI
359359
try:
360360
response = httpx.post(
361361
f"{ollama_host}/api/generate",

scripts/extract_p2025_from_text.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -31,7 +31,7 @@
3131

3232

3333
# Ollama configuration
34-
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://localhost:11434")
34+
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "https://ollama.baytides.org")
3535
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3.1:8b-instruct-q8_0")
3636

3737

scripts/extract_p2025_policies.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -36,7 +36,7 @@
3636

3737

3838
# Ollama configuration
39-
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://20.98.70.48:11434")
39+
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "https://ollama.baytides.org")
4040
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3.1:8b-instruct-q8_0")
4141

4242

scripts/fix_data_gaps.py

Lines changed: 12 additions & 12 deletions
Original file line number · Diff line number · Diff line change
@@ -4,7 +4,7 @@
44
This script addresses the major discrepancies between vision and implementation:
55
1. Fetch full text for Executive Orders
66
2. Run P2025 tracker to match objectives to EOs
7-
3. Generate resistance recommendations via Carl
7+
3. Generate resistance recommendations via Bay Tides AI
88
4. Populate legal challenges from real court data
99
5. Populate state resistance actions
1010
6. Generate content insights
@@ -185,25 +185,25 @@ def run_p2025_matcher():
185185

186186

187187
def generate_recommendations(limit: int = 50):
188-
"""Generate resistance recommendations via Carl."""
188+
"""Generate resistance recommendations via Bay Tides AI."""
189189
from civitas.db.models import Project2025Policy, ResistanceRecommendation
190190
from civitas.resistance.recommender import ResistanceRecommender
191191

192192
console.print("\n[bold blue]═══ Generating Resistance Recommendations ═══[/bold blue]\n")
193193

194-
# Check Carl connection
195-
ollama_host = os.getenv("OLLAMA_HOST", "http://20.98.70.48:11434")
196-
console.print(f"Using Carl at: {ollama_host}")
194+
# Check AI connection
195+
ollama_host = os.getenv("OLLAMA_HOST", "https://ollama.baytides.org")
196+
console.print(f"Using Bay Tides AI at: {ollama_host}")
197197

198198
try:
199199
import httpx
200200
resp = httpx.get(f"{ollama_host}/api/tags", timeout=10)
201201
if resp.status_code != 200:
202-
console.print("[red]Cannot connect to Carl. Skipping recommendations.[/red]")
202+
console.print("[red]Cannot connect to Bay Tides AI. Skipping recommendations.[/red]")
203203
return 0
204-
console.print("[green]Carl connected[/green]")
204+
console.print("[green]Bay Tides AI connected[/green]")
205205
except Exception as e:
206-
console.print(f"[red]Cannot connect to Carl: {e}[/red]")
206+
console.print(f"[red]Cannot connect to Bay Tides AI: {e}[/red]")
207207
return 0
208208

209209
session = get_session()
@@ -508,17 +508,17 @@ def generate_content_insights(limit: int = 50):
508508

509509
console.print("\n[bold blue]═══ Generating Content Insights ═══[/bold blue]\n")
510510

511-
# Check Carl connection
512-
ollama_host = os.getenv("OLLAMA_HOST", "http://20.98.70.48:11434")
511+
# Check AI connection
512+
ollama_host = os.getenv("OLLAMA_HOST", "https://ollama.baytides.org")
513513

514514
try:
515515
import httpx
516516
resp = httpx.get(f"{ollama_host}/api/tags", timeout=10)
517517
if resp.status_code != 200:
518-
console.print("[red]Cannot connect to Carl. Skipping insights.[/red]")
518+
console.print("[red]Cannot connect to Bay Tides AI. Skipping insights.[/red]")
519519
return 0
520520
except Exception as e:
521-
console.print(f"[red]Cannot connect to Carl: {e}[/red]")
521+
console.print(f"[red]Cannot connect to Bay Tides AI: {e}[/red]")
522522
return 0
523523

524524
session = get_session()

scripts/generate_resistance_expert_content.sh

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,10 +1,10 @@
11
#!/usr/bin/env bash
22
set -euo pipefail
33

4-
# Generates expert-mode resistance content via Carl (Ollama).
4+
# Generates expert-mode resistance content via Ollama (Bay Tides).
55
# Required env:
66
# DATABASE_URL (or pass --db)
7-
# OLLAMA_HOST (Carl VM, e.g. http://20.98.70.48:11434)
7+
# OLLAMA_HOST (e.g. https://ollama.baytides.org)
88
# Optional env:
99
# OLLAMA_MODEL (default: llama3.2)
1010
# ANALYZE_LIMIT (default: 100)

scripts/generate_scotus_profiles.sh

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1,9 +1,9 @@
11
#!/usr/bin/env bash
22
set -euo pipefail
33

4-
# Generate SCOTUS justice profiles via Carl (Ollama).
4+
# Generate SCOTUS justice profiles via Ollama (Bay Tides).
55
# Required env:
6-
# OLLAMA_HOST (Carl VM, e.g. http://20.98.70.48:11434)
6+
# OLLAMA_HOST (e.g. https://ollama.baytides.org)
77
# Optional env:
88
# OLLAMA_MODEL (default: llama3.2)
99
# DATABASE_URL (or pass --db)

scripts/local_batch_generation.sh

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -16,9 +16,9 @@ cd "$CIVITAS_ROOT"
1616
source .venv/bin/activate
1717

1818
# Configuration
19-
export OLLAMA_HOST="${OLLAMA_HOST:-http://localhost:11434}"
19+
export OLLAMA_HOST="${OLLAMA_HOST:-https://ollama.baytides.org}"
2020
export OLLAMA_MODEL="${OLLAMA_MODEL:-llama3.1:8b-instruct-q8_0}"
21-
DB_PATH="${DB_PATH:-civitas_carl.db}"
21+
DB_PATH="${DB_PATH:-civitas.db}"
2222
LOG_FILE="${LOG_FILE:-/tmp/civitas_local_batch.log}"
2323
BATCH_SIZE="${BATCH_SIZE:-25}"
2424
SLEEP_BETWEEN="${SLEEP_BETWEEN:-2}"
@@ -144,5 +144,5 @@ log " With analyses: ${analyzed} ($(( analyzed * 100 / total ))%)"
144144
log " With recommendations: ${with_recs} ($(( with_recs * 100 / total ))%)"
145145
log "=============================================="
146146
log ""
147-
log "To sync back to Carl:"
148-
log " scp ${DB_PATH} carl:/opt/civitas/civitas.db"
147+
log "To deploy database:"
148+
log " scp ${DB_PATH} server:/opt/civitas/civitas.db"

0 commit comments

Comments (0)