Getting it all running

This commit is contained in:
2025-07-25 21:17:58 +01:00
parent a1e0fb1ebb
commit d2540d2b05
6 changed files with 449 additions and 15 deletions

View File

@@ -38,12 +38,41 @@ NEXTCLOUD_ADMIN_USER=${USERNAME}
NEXTCLOUD_ADMIN_PASSWORD=
#
# MineCraft Settings
# MineCraft settings
# Image: itzg/minecraft-server
#
MINECRAFT_PASSWORD=
MINECRAFT_WHITELIST=
#
# LibreChat settings (droid.scarif.space)
# Image: ghcr.io/danny-avila/librechat-dev-api:latest
#
LIBRECHAT_CREDS_KEY=
LIBRECHAT_CREDS_IV=
LIBRECHAT_JWT_SECRET=
LIBRECHAT_JWT_REFRESH_SECRET=
OPENROUTER_KEY=
AIHUBMIX_KEY=
#
# Meilisearch settings
# Image: getmeili/meilisearch:v1.12.3
#
MEILISEARCH_MASTER_KEY=
#
# Searxng settings (holocron.scarif.space)
# Image: searxng/searxng:latest
#
SEARXNG_SECRET_KEY=
#
# Rag API settings
# Image: ghcr.io/danny-avila/librechat-rag-api-dev:latest
#
HF_TOKEN=
#
# Tiny Tiny RSS settings (intel.scarif.space)
# Image: cthulhoo/ttrss-fpm-pgsql-static

View File

@@ -5,6 +5,8 @@ Taking this task one step at a time I started by creating a docker nextcloud ser
> First of all **SWITCH ROUTER DNS SERVER**
### Linux
> This might not be necessary anymore. I think I fixed it by linking to the github repository instead of the AUR.
1. Remove zfs-linux `yay -Rns zfs-linux-lts`
2. Upgrade all packages `yay`
3. Clone the zfs-linux repository `git clone https://aur.archlinux.org/zfs-linux-lts.git`
@@ -171,12 +173,12 @@ The approaches they used were:
I added a new one that put in an nginx reverse proxy container to the custom fpm as that would be needed if I wanted multiple containers serving on the same machine (Monica/Nextcloud/Gitea/etc).
The results I got were similar to the original article:
Solution|Rate|Longest|Shortest|Size (MB)
---|---|---|---|---
Official fpm|143.17|0.92|0.12|
Official apache|503.52|0.53|0.02|415
Custom fpm|2197.80|0.12|0.03|336
Custom fpm proxy|1992.03|0.16|0.02|392
|Solution|Rate|Longest|Shortest|Size (MB)|
|---|---|---|---|---|
|Official fpm|143.17|0.92|0.12| |
|Official apache|503.52|0.53|0.02|415|
|Custom fpm|2197.80|0.12|0.03|336|
|Custom fpm proxy|1992.03|0.16|0.02|392|
# Creating a Nextcloud virtual machine with Rancher
To create the virtual machine I needed to install virtualbox and docker-machine, then I ran the following command:

View File

@@ -5,6 +5,14 @@ x-logging: &logging
max-size: "5m"
max-file: "2"
x-firecrawl-service: &firecrawl-service
image: ghcr.io/mendableai/firecrawl
x-firecrawl-env: &firecrawl-env
REDIS_URL: redis://redis:6379
REDIS_RATE_LIMIT_URL: redis://redis:6379
PLAYWRIGHT_MICROSERVICE_URL: http://playwright:3000/scrape
services:
# change:
# <<: *logging
@@ -154,6 +162,77 @@ services:
depends_on:
- db
librechat:
image: ghcr.io/danny-avila/librechat-dev-api:latest
ports:
- 3080:3080
networks:
- nginx
- redis
- db
depends_on:
- mongodb
- rag_api
restart: always
extra_hosts:
- "host.docker.internal:host-gateway"
<<: *logging
environment:
- VIRTUAL_HOST=droid.${DOMAIN}
- VIRTUAL_PORT=3080
- CERT_NAME=${DOMAIN}
- HOST=0.0.0.0
- NODE_ENV=production
- MONGO_URI=mongodb://${DB_USER}:${DB_PASSWORD}@mongodb:27017/LibreChat?authSource=admin
- MEILI_HOST=http://meilisearch:7700
- MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}
- RAG_PORT=8000
- RAG_API_URL=http://rag_api:8000
- SEARXNG_INSTANCE_URL=http://searxng:8080
- SEARXNG_API_KEY=${SEARXNG_SECRET_KEY}
- FIRECRAWL_URL=http://firecrawl:3002
- FIRECRAWL_API_KEY=
- ALLOW_EMAIL_LOGIN=true
- ALLOW_REGISTRATION=false
- ALLOW_SOCIAL_LOGIN=false
- ALLOW_SOCIAL_REGISTRATION=false
- ALLOW_PASSWORD_RESET=false
- ALLOW_ACCOUNT_DELETION=false
- ALLOW_UNVERIFIED_EMAIL_LOGIN=true
- CREDS_KEY=${LIBRECHAT_CREDS_KEY}
- CREDS_IV=${LIBRECHAT_CREDS_IV}
- JWT_SECRET=${LIBRECHAT_JWT_SECRET}
- JWT_REFRESH_SECRET=${LIBRECHAT_JWT_REFRESH_SECRET}
- AIHUBMIX_KEY=${AIHUBMIX_KEY}
- OPENROUTER_KEY=${OPENROUTER_KEY}
- IMAGE_GEN_OAI_API_KEY=${AIHUBMIX_KEY}
- IMAGE_GEN_OAI_MODEL=gpt-image-1
volumes:
- type: bind
source: ./librechat/librechat.yaml
target: /app/librechat.yaml
- /mnt/tower/stardust/files/chris/.droid/images:/app/client/public/images
- /mnt/tower/stardust/files/chris/.droid/uploads:/app/uploads
- /mnt/tower/stardust/files/chris/.droid/logs:/app/logs
- /mnt/tower/stardust/files/chris/:/files
searxng:
image: docker.io/searxng/searxng:latest
restart: unless-stopped
networks:
- nginx
- redis
volumes:
- ./searxng:/etc/searxng:rw
- searxng:/var/cache/searxng:rw
environment:
- SEARXNG_BASE_URL=https://${SEARXNG_HOSTNAME:-localhost}/
- SEARXNG_SECRET=${SEARXNG_SECRET_KEY}
- VIRTUAL_HOST=holocron.${DOMAIN}
- VIRTUAL_PORT=8080
- CERT_NAME=${DOMAIN}
<<: *logging
minecraft:
profiles: ["prod"]
<<: *logging
@@ -580,7 +659,6 @@ services:
aliases:
- jvb.meet.jitsi
db:
profiles: ["prod", "dev"]
<<: *logging
@@ -597,14 +675,115 @@ services:
networks:
- db
playwright:
profiles: ["prod", "dev"]
<<: *logging
image: ghcr.io/mendableai/playwright-service:latest
environment:
PORT: 3000
BLOCK_MEDIA: true
networks:
- playwright
firecrawl:
profiles: ["prod", "dev"]
<<: [*logging, *firecrawl-service]
environment:
<<: *firecrawl-env
HOST: "0.0.0.0"
PORT: ${INTERNAL_PORT:-3002}
FLY_PROCESS_GROUP: app
ENV: local
depends_on:
- redis
- playwright
ports:
- "3002:3002"
command: [ "pnpm", "run", "start:production" ]
firecrawl-worker:
profiles: ["prod", "dev"]
<<: [*firecrawl-service, *logging]
environment:
<<: *firecrawl-env
FLY_PROCESS_GROUP: worker
depends_on:
- redis
- playwright
- firecrawl
command: [ "pnpm", "run", "workers" ]
redis:
profiles: ["prod", "dev"]
<<: *logging
image: redis:alpine
restart: always
networks:
- redis
mongodb:
profiles: ["prod", "dev"]
# ports:
# - 27018:27017
image: mongo
restart: always
volumes:
- mongodb:/data/db
command: mongod
networks:
- db
environment:
- MONGO_INITDB_ROOT_USERNAME=${DB_USER}
- MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}
<<: *logging
meilisearch:
profiles: ["prod", "dev"]
image: getmeili/meilisearch:v1.12.3
restart: always
networks:
- db
# ports:
# - 7700:7700
environment:
- MEILI_HOST=http://meilisearch:7700
- MEILI_NO_ANALYTICS=true
- MEILI_MASTER_KEY=${MEILISEARCH_MASTER_KEY}
volumes:
- meilisearch:/meili_data
<<: *logging
vectordb:
profiles: ["prod", "dev"]
image: ankane/pgvector:latest
networks:
- db
environment:
POSTGRES_DB: mydatabase
POSTGRES_USER: myuser
POSTGRES_PASSWORD: mypassword
restart: always
volumes:
- pgdata2:/var/lib/postgresql/data
<<: *logging
rag_api:
profiles: ["prod", "dev"]
image: ghcr.io/danny-avila/librechat-rag-api-dev:latest
environment:
- DB_HOST=vectordb
- RAG_PORT=8000
- EMBEDDINGS_PROVIDER=huggingface
- HF_TOKEN=${HF_TOKEN}
restart: always
networks:
- db
depends_on:
- vectordb
<<: *logging
nginx:
<<: *logging
profiles: ["prod", "dev"]
image: nginxproxy/nginx-proxy
restart: always
@@ -614,15 +793,15 @@ services:
volumes:
- /var/run/docker.sock:/tmp/docker.sock:ro
- /opt/ssl:/etc/nginx/certs:ro
- ./nginx-proxy/vhost.d/labs_location:/etc/nginx/vhost.d/labs.${DOMAIN}_location:ro
- ./nginx-proxy/vhost.d/office:/etc/nginx/vhost.d/office.${DOMAIN}:ro
- ./nginx-proxy/vhost.d/rec:/etc/nginx/vhost.d/rec.${DOMAIN}:ro
- ./nginx-proxy/vhost.d/tower_location_override:/etc/nginx/vhost.d/tower.${DOMAIN}_location_override:ro
- ./nginx-proxy/vhost.d/tower:/etc/nginx/vhost.d/tower.${DOMAIN}:ro
- ./nginx-proxy/conf.d/custom_proxy.conf:/etc/nginx/conf.d/custom_proxy.conf:ro
- ./nginx/vhost.d/labs_location:/etc/nginx/vhost.d/labs.${DOMAIN}_location:ro
- ./nginx/vhost.d/office:/etc/nginx/vhost.d/office.${DOMAIN}:ro
- ./nginx/vhost.d/rec:/etc/nginx/vhost.d/rec.${DOMAIN}:ro
- ./nginx/vhost.d/tower_location_override:/etc/nginx/vhost.d/tower.${DOMAIN}_location_override:ro
- ./nginx/vhost.d/tower:/etc/nginx/vhost.d/tower.${DOMAIN}:ro
- ./nginx/conf.d/custom_proxy.conf:/etc/nginx/conf.d/custom_proxy.conf:ro
- nextcloud:/var/www/html/nextcloud:ro
- ./christmas:/var/www/html/christmas:ro
- tt-rss:/var/www/html/tt-rss:ro
# - tt-rss:/var/www/html/tt-rss:ro
networks:
- nginx
@@ -647,10 +826,15 @@ volumes:
navidrome:
minecraft:
change:
mongodb:
searxng:
pgdata2:
meilisearch:
networks:
db:
nginx:
redis:
meet.jitsi:
playwright:

205
librechat/librechat.yaml Normal file
View File

@@ -0,0 +1,205 @@
# For more information, see the Configuration Guide:
# https://www.librechat.ai/docs/configuration/librechat_yaml
# Configuration version (required)
version: 1.2.1
# Cache settings: Set to true to enable caching
cache: true
# File strategy s3/firebase
# fileStrategy: "s3"
# Custom interface configuration
interface:
customWelcome: "Welcome to DroidChat! How may I be of assistance?"
# MCP Servers UI configuration
mcpServers:
placeholder: 'MCP Servers'
# Privacy policy settings
privacyPolicy:
externalUrl: 'https://librechat.ai/privacy-policy'
openNewTab: true
# Terms of service
termsOfService:
externalUrl: 'https://librechat.ai/tos'
openNewTab: true
modalAcceptance: true
modalTitle: "Terms of Service for LibreChat"
modalContent: |
# Terms and Conditions for LibreChat
Welcome to LibreChat!
endpointsMenu: true
modelSelect: true
parameters: true
sidePanel: true
presets: true
prompts: true
bookmarks: true
multiConvo: true
agents: true
# Temporary chat retention period in hours (default: 720, min: 1, max: 8760)
# temporaryChatRetention: 1
speech:
tts:
openai:
url: 'https://aihubmix.com/v1'
apiKey: '${AIHUBMIX_KEY}'
model: 'gpt-4o-mini-tts'
voices: [
'alloy',
'ash',
'ballad',
'coral',
'echo',
'fable',
'nova',
'onyx',
'sage',
'shimmer',
]
stt:
openai:
url: 'https://aihubmix.com/v1'
apiKey: '${AIHUBMIX_KEY}'
model: 'distil-whisper-large-v3-en'
# Example MCP Servers Object Structure
# mcpServers:
# everything:
# # type: sse # type can optionally be omitted
# url: http://localhost:3001/sse
# timeout: 60000 # 1 minute timeout for this server, this is the default timeout for MCP servers.
puppeteer:
type: stdio
command: npx
args:
- -y
- "@modelcontextprotocol/server-puppeteer"
timeout: 300000 # 5 minutes timeout for this server
filesystem:
# type: stdio
command: npx
args:
- -y
- "@modelcontextprotocol/server-filesystem"
- /files/Library
- /files/RPG/Resources
mcp-obsidian:
command: npx
args:
- -y
- "mcp-obsidian"
- /files/Notes
# Definition of custom endpoints
endpoints:
# assistants:
# disableBuilder: false # Disable Assistants Builder Interface by setting to `true`
# pollIntervalMs: 3000 # Polling interval for checking assistant updates
# timeoutMs: 180000 # Timeout for assistant operations
# # Should only be one or the other, either `supportedIds` or `excludedIds`
# supportedIds: ["asst_supportedAssistantId1", "asst_supportedAssistantId2"]
# # excludedIds: ["asst_excludedAssistantId"]
# # Only show assistants that the user created or that were created externally (e.g. in Assistants playground).
# # privateAssistants: false # Does not work with `supportedIds` or `excludedIds`
# # (optional) Models that support retrieval, will default to latest known OpenAI models that support the feature
# retrievalModels: ["gpt-4-turbo-preview"]
# # (optional) Assistant Capabilities available to all users. Omit the ones you wish to exclude. Defaults to list below.
# capabilities: ["code_interpreter", "retrieval", "actions", "tools", "image_vision"]
# agents:
# # (optional) Default recursion depth for agents, defaults to 25
# recursionLimit: 50
# # (optional) Max recursion depth for agents, defaults to 25
# maxRecursionLimit: 100
# # (optional) Disable the builder interface for agents
# disableBuilder: false
# # (optional) Agent Capabilities available to all users. Omit the ones you wish to exclude. Defaults to list below.
# capabilities: ["execute_code", "file_search", "actions", "tools"]
custom:
- name: 'OpenRouter'
apiKey: '${OPENROUTER_KEY}'
baseURL: 'https://openrouter.ai/api/v1'
models:
default:
- 'switchpoint/router'
- 'moonshotai/kimi-k2:free'
- 'deepseek/deepseek-chat-v3-0324:free'
- 'deepseek/deepseek-r1-0528:free'
- 'openai/gpt-4.1'
- 'openai/o3'
fetch: true
titleConvo: true
titleModel: 'meta-llama/llama-3-70b-instruct'
dropParams: ['stop']
modelDisplayLabel: 'OpenRouter'
- name: 'AiHubMix'
apiKey: '${AIHUBMIX_KEY}'
baseURL: 'https://aihubmix.com/v1'
models:
default:
- 'moonshotai/kimi-k2:free'
- 'deepseek/deepseek-chat-v3-0324:free'
- 'deepseek/deepseek-r1-0528:free'
fetch: true
titleConvo: true
titleModel: 'meta-llama/llama-3-70b-instruct'
dropParams: ['stop']
modelDisplayLabel: 'AiHubMix'
fileConfig:
# endpoints:
# assistants:
# fileLimit: 5
# fileSizeLimit: 10 # Maximum size for an individual file in MB
# totalSizeLimit: 50 # Maximum total size for all files in a single request in MB
# supportedMimeTypes:
# - "image/.*"
# - "application/pdf"
# openAI:
# disabled: true # Disables file uploading to the OpenAI endpoint
# default:
# totalSizeLimit: 20
# YourCustomEndpointName:
# fileLimit: 2
# fileSizeLimit: 5
# serverFileSizeLimit: 100 # Global server file size limit in MB
# avatarSizeLimit: 2 # Limit for user avatar image size in MB
# imageGeneration: # Image Gen settings, either percentage or px
# percentage: 100
# px: 1024
# # Client-side image resizing to prevent upload errors
clientImageResize:
enabled: true
maxWidth: 1900
maxHeight: 1900
quality: 0.92
# Memory configuration for user memories
memory:
# (optional) Disable memory functionality
disabled: false
# (optional) Restrict memory keys to specific values to limit memory storage and improve consistency
validKeys: ["preferences", "work_info", "personal_info", "skills", "interests", "context"]
# (optional) Maximum token limit for memory storage (not yet implemented for token counting)
tokenLimit: 10000
# (optional) Enable personalization features (defaults to true if memory is configured)
# When false, users will not see the Personalization tab in settings
personalize: true
# Memory agent configuration - either use an existing agent by ID or define inline
agent:
# Option 1: Use existing agent by ID
# id: "your-memory-agent-id"
# Option 2: Define agent inline
provider: "openrouter"
model: "openai/gpt-4o-mini"
instructions: "You are a memory management assistant. Store and manage user information accurately."
model_parameters:
temperature: 0.1

7
searxng/limiter.toml Normal file
View File

@@ -0,0 +1,7 @@
# This configuration file updates the default configuration file
# See https://github.com/searxng/searxng/blob/master/searx/limiter.toml
[botdetection.ip_limit]
# activate advanced bot protection
# enable this when running the instance for a public usage on the internet
link_token = false

7
searxng/settings.yml Normal file
View File

@@ -0,0 +1,7 @@
# see https://docs.searxng.org/admin/settings/settings.html#settings-use-default-settings
use_default_settings: true
server:
limiter: true # enable this when running the instance for a public usage on the internet
image_proxy: true
redis:
url: redis://redis:6379/0