commit d4e1b699c6eb71027ba9f2a1d3ac2e02e788d5b1 Author: deepgeek Date: Mon Mar 17 18:08:23 2025 +0800 init(*):初始化;使用sun-panel作为前端;使用dockge管理dockercompose diff --git a/anythinllm/.env b/anythinllm/.env new file mode 100644 index 0000000..6416fc5 --- /dev/null +++ b/anythinllm/.env @@ -0,0 +1,329 @@ + +SERVER_PORT=3001 +STORAGE_DIR="/app/server/storage" +UID='1000' +GID='1000' +# SIG_KEY='passphrase' # Please generate random string at least 32 chars long. +# SIG_SALT='salt' # Please generate random string at least 32 chars long. +JWT_SECRET="Mnx6P2gPXDz1FngbX3Vmn9SB4T2EVeE4JDkrqM2biA0o6nrWxDNE34QTXzxSR7ToKpTnjU6Qk2rGc3UGc3C03XL0w3gsXoUkA7kje1A82f2V2bTchu4N64uPkljZmF1x" # Only needed if AUTH_TOKEN is set. Please generate random string at least 12 chars long. + +########################################### +######## LLM API SElECTION ################ +########################################### +# LLM_PROVIDER='openai' +# OPEN_AI_KEY= +# OPEN_MODEL_PREF='gpt-4o' + +# LLM_PROVIDER='gemini' +# GEMINI_API_KEY= +# GEMINI_LLM_MODEL_PREF='gemini-pro' + +# LLM_PROVIDER='azure' +# AZURE_OPENAI_ENDPOINT= +# AZURE_OPENAI_KEY= +# OPEN_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model. +# EMBEDDING_MODEL_PREF='embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. 
Valid base model is text-embedding-ada-002 + +# LLM_PROVIDER='anthropic' +# ANTHROPIC_API_KEY=sk-ant-xxxx +# ANTHROPIC_MODEL_PREF='claude-2' + +# LLM_PROVIDER='lmstudio' +# LMSTUDIO_BASE_PATH='http://your-server:1234/v1' +# LMSTUDIO_MODEL_PREF='Loaded from Chat UI' # this is a bug in LMStudio 0.2.17 +# LMSTUDIO_MODEL_TOKEN_LIMIT=4096 + +# LLM_PROVIDER='localai' +LOCAL_AI_BASE_PATH='http://host.docker.internal:10580/v1' +LOCAL_AI_MODEL_PREF='DeepSeek-R1-Distill-Llama-70B-AWQ' +LOCAL_AI_MODEL_TOKEN_LIMIT=8192 +LOCAL_AI_API_KEY="O8Is3NSYnp5fICWFbhkbwpLWgvMLkdCSuXR5ZggLmgwTKNPEWsjx1NqUxkyU7wLX" + +# LLM_PROVIDER='ollama' +# OLLAMA_BASE_PATH='http://host.docker.internal:11434' +# OLLAMA_MODEL_PREF='llama2' +# OLLAMA_MODEL_TOKEN_LIMIT=4096 +# OLLAMA_AUTH_TOKEN='your-ollama-auth-token-here (optional, only for ollama running behind auth - Bearer token)' + +# LLM_PROVIDER='togetherai' +# TOGETHER_AI_API_KEY='my-together-ai-key' +# TOGETHER_AI_MODEL_PREF='mistralai/Mixtral-8x7B-Instruct-v0.1' + +# LLM_PROVIDER='mistral' +# MISTRAL_API_KEY='example-mistral-ai-api-key' +# MISTRAL_MODEL_PREF='mistral-tiny' + +# LLM_PROVIDER='perplexity' +# PERPLEXITY_API_KEY='my-perplexity-key' +# PERPLEXITY_MODEL_PREF='codellama-34b-instruct' + +# LLM_PROVIDER='openrouter' +# OPENROUTER_API_KEY='my-openrouter-key' +# OPENROUTER_MODEL_PREF='openrouter/auto' + +# LLM_PROVIDER='huggingface' +# HUGGING_FACE_LLM_ENDPOINT=https://uuid-here.us-east-1.aws.endpoints.huggingface.cloud +# HUGGING_FACE_LLM_API_KEY=hf_xxxxxx +# HUGGING_FACE_LLM_TOKEN_LIMIT=8000 + +# LLM_PROVIDER='groq' +# GROQ_API_KEY=gsk_abcxyz +# GROQ_MODEL_PREF=llama3-8b-8192 + +# LLM_PROVIDER='koboldcpp' +# KOBOLD_CPP_BASE_PATH='http://127.0.0.1:5000/v1' +# KOBOLD_CPP_MODEL_PREF='koboldcpp/codellama-7b-instruct.Q4_K_S' +# KOBOLD_CPP_MODEL_TOKEN_LIMIT=4096 + +# LLM_PROVIDER='textgenwebui' +# TEXT_GEN_WEB_UI_BASE_PATH='http://127.0.0.1:5000/v1' +# TEXT_GEN_WEB_UI_TOKEN_LIMIT=4096 +# TEXT_GEN_WEB_UI_API_KEY='sk-123abc' + +# 
LLM_PROVIDER='generic-openai' +# GENERIC_OPEN_AI_BASE_PATH='http://proxy.url.openai.com/v1' +# GENERIC_OPEN_AI_MODEL_PREF='gpt-3.5-turbo' +# GENERIC_OPEN_AI_MODEL_TOKEN_LIMIT=4096 +# GENERIC_OPEN_AI_API_KEY=sk-123abc + +# LLM_PROVIDER='litellm' +# LITE_LLM_MODEL_PREF='gpt-3.5-turbo' +# LITE_LLM_MODEL_TOKEN_LIMIT=4096 +# LITE_LLM_BASE_PATH='http://127.0.0.1:4000' +# LITE_LLM_API_KEY='sk-123abc' + +# LLM_PROVIDER='novita' +# NOVITA_LLM_API_KEY='your-novita-api-key-here' check on https://novita.ai/settings/key-management +# NOVITA_LLM_MODEL_PREF='deepseek/deepseek-r1' + +# LLM_PROVIDER='cohere' +# COHERE_API_KEY= +# COHERE_MODEL_PREF='command-r' + +# LLM_PROVIDER='bedrock' +# AWS_BEDROCK_LLM_ACCESS_KEY_ID= +# AWS_BEDROCK_LLM_ACCESS_KEY= +# AWS_BEDROCK_LLM_REGION=us-west-2 +# AWS_BEDROCK_LLM_MODEL_PREFERENCE=meta.llama3-1-8b-instruct-v1:0 +# AWS_BEDROCK_LLM_MODEL_TOKEN_LIMIT=8191 + +# LLM_PROVIDER='fireworksai' +# FIREWORKS_AI_LLM_API_KEY='my-fireworks-ai-key' +# FIREWORKS_AI_LLM_MODEL_PREF='accounts/fireworks/models/llama-v3p1-8b-instruct' + +# LLM_PROVIDER='apipie' +# APIPIE_LLM_API_KEY='sk-123abc' +# APIPIE_LLM_MODEL_PREF='openrouter/llama-3.1-8b-instruct' + +# LLM_PROVIDER='xai' +# XAI_LLM_API_KEY='xai-your-api-key-here' +# XAI_LLM_MODEL_PREF='grok-beta' + +# LLM_PROVIDER='nvidia-nim' +# NVIDIA_NIM_LLM_BASE_PATH='http://127.0.0.1:8000' +# NVIDIA_NIM_LLM_MODEL_PREF='meta/llama-3.2-3b-instruct' + +# LLM_PROVIDER='deepseek' +# DEEPSEEK_API_KEY='your-deepseek-api-key-here' +# DEEPSEEK_MODEL_PREF='deepseek-chat' + +# LLM_PROVIDER='ppio' +# PPIO_API_KEY='your-ppio-api-key-here' +# PPIO_MODEL_PREF=deepseek/deepseek-v3/community + +########################################### +######## Embedding API SElECTION ########## +########################################### +# Only used if you are using an LLM that does not natively support embedding (openai or Azure) +# EMBEDDING_ENGINE='openai' +# OPEN_AI_KEY=sk-xxxx +# EMBEDDING_MODEL_PREF='text-embedding-ada-002' + +# 
EMBEDDING_ENGINE='azure' +# AZURE_OPENAI_ENDPOINT= +# AZURE_OPENAI_KEY= +# EMBEDDING_MODEL_PREF='my-embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002 + +# EMBEDDING_ENGINE='localai' +# EMBEDDING_BASE_PATH='http://localhost:8080/v1' +# EMBEDDING_MODEL_PREF='text-embedding-ada-002' +# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=1000 # The max chunk size in chars a string to embed can be + +# EMBEDDING_ENGINE='ollama' +# EMBEDDING_BASE_PATH='http://host.docker.internal:11434' +# EMBEDDING_MODEL_PREF='nomic-embed-text:latest' +# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192 + +# EMBEDDING_ENGINE='lmstudio' +# EMBEDDING_BASE_PATH='https://host.docker.internal:1234/v1' +# EMBEDDING_MODEL_PREF='nomic-ai/nomic-embed-text-v1.5-GGUF/nomic-embed-text-v1.5.Q4_0.gguf' +# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192 + +# EMBEDDING_ENGINE='cohere' +# COHERE_API_KEY= +# EMBEDDING_MODEL_PREF='embed-english-v3.0' + +# EMBEDDING_ENGINE='voyageai' +# VOYAGEAI_API_KEY= +# EMBEDDING_MODEL_PREF='voyage-large-2-instruct' + +# EMBEDDING_ENGINE='litellm' +# EMBEDDING_MODEL_PREF='text-embedding-ada-002' +# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192 +# LITE_LLM_BASE_PATH='http://127.0.0.1:4000' +# LITE_LLM_API_KEY='sk-123abc' + +# EMBEDDING_ENGINE='generic-openai' +# EMBEDDING_MODEL_PREF='text-embedding-ada-002' +# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192 +# EMBEDDING_BASE_PATH='http://127.0.0.1:4000' +# GENERIC_OPEN_AI_EMBEDDING_API_KEY='sk-123abc' +# GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS=500 + +# EMBEDDING_ENGINE='gemini' +# GEMINI_EMBEDDING_API_KEY= +# EMBEDDING_MODEL_PREF='text-embedding-004' + +########################################### +######## Vector Database Selection ######## +########################################### +# Enable all below if you are using vector database: Chroma. 
+# VECTOR_DB="chroma" +# CHROMA_ENDPOINT='http://host.docker.internal:8000' +# CHROMA_API_HEADER="X-Api-Key" +# CHROMA_API_KEY="sk-123abc" + +# Enable all below if you are using vector database: Pinecone. +# VECTOR_DB="pinecone" +# PINECONE_API_KEY= +# PINECONE_INDEX= + +# Enable all below if you are using vector database: LanceDB. +# VECTOR_DB="lancedb" + +# Enable all below if you are using vector database: Weaviate. +# VECTOR_DB="weaviate" +# WEAVIATE_ENDPOINT="http://localhost:8080" +# WEAVIATE_API_KEY= + +# Enable all below if you are using vector database: Qdrant. +# VECTOR_DB="qdrant" +# QDRANT_ENDPOINT="http://localhost:6333" +# QDRANT_API_KEY= + +# Enable all below if you are using vector database: Milvus. +# VECTOR_DB="milvus" +# MILVUS_ADDRESS="http://localhost:19530" +# MILVUS_USERNAME= +# MILVUS_PASSWORD= + +# Enable all below if you are using vector database: Zilliz Cloud. +# VECTOR_DB="zilliz" +# ZILLIZ_ENDPOINT="https://sample.api.gcp-us-west1.zillizcloud.com" +# ZILLIZ_API_TOKEN=api-token-here + +# Enable all below if you are using vector database: Astra DB. +# VECTOR_DB="astra" +# ASTRA_DB_APPLICATION_TOKEN= +# ASTRA_DB_ENDPOINT= + +########################################### +######## Audio Model Selection ############ +########################################### +# (default) use built-in whisper-small model. +# WHISPER_PROVIDER="local" + +# use openai hosted whisper model. 
+# WHISPER_PROVIDER="openai" +# OPEN_AI_KEY=sk-xxxxxxxx + +########################################### +######## TTS/STT Model Selection ########## +########################################### +# TTS_PROVIDER="native" + +# TTS_PROVIDER="openai" +# TTS_OPEN_AI_KEY=sk-example +# TTS_OPEN_AI_VOICE_MODEL=nova + +# TTS_PROVIDER="generic-openai" +# TTS_OPEN_AI_COMPATIBLE_KEY=sk-example +# TTS_OPEN_AI_COMPATIBLE_VOICE_MODEL=nova +# TTS_OPEN_AI_COMPATIBLE_ENDPOINT="https://api.openai.com/v1" + +# TTS_PROVIDER="elevenlabs" +# TTS_ELEVEN_LABS_KEY= +# TTS_ELEVEN_LABS_VOICE_MODEL=21m00Tcm4TlvDq8ikWAM # Rachel + +# CLOUD DEPLOYMENT VARIABLES ONLY +# AUTH_TOKEN="hunter2" # This is the password to your application if remote hosting. +# DISABLE_TELEMETRY="false" + +########################################### +######## PASSWORD COMPLEXITY ############## +########################################### +# Enforce a password schema for your organization users. +# Documentation on how to use https://github.com/kamronbatman/joi-password-complexity +# Default is only 8 char minimum +# PASSWORDMINCHAR=8 +# PASSWORDMAXCHAR=250 +# PASSWORDLOWERCASE=1 +# PASSWORDUPPERCASE=1 +# PASSWORDNUMERIC=1 +# PASSWORDSYMBOL=1 +# PASSWORDREQUIREMENTS=4 + +########################################### +######## ENABLE HTTPS SERVER ############## +########################################### +# By enabling this and providing the path/filename for the key and cert, +# the server will use HTTPS instead of HTTP. 
+#ENABLE_HTTPS="true" +#HTTPS_CERT_PATH="sslcert/cert.pem" +#HTTPS_KEY_PATH="sslcert/key.pem" + +########################################### +######## AGENT SERVICE KEYS ############### +########################################### + +#------ SEARCH ENGINES ------- +#============================= +#------ Google Search -------- https://programmablesearchengine.google.com/controlpanel/create +# AGENT_GSE_KEY= +# AGENT_GSE_CTX= + +#------ SearchApi.io ----------- https://www.searchapi.io/ +# AGENT_SEARCHAPI_API_KEY= +# AGENT_SEARCHAPI_ENGINE=google + +#------ Serper.dev ----------- https://serper.dev/ +# AGENT_SERPER_DEV_KEY= + +#------ Bing Search ----------- https://portal.azure.com/ +# AGENT_BING_SEARCH_API_KEY= + +#------ Serply.io ----------- https://serply.io/ +# AGENT_SERPLY_API_KEY= + +#------ SearXNG ----------- https://github.com/searxng/searxng +# AGENT_SEARXNG_API_URL= + +#------ Tavily ----------- https://www.tavily.com/ +# AGENT_TAVILY_API_KEY= + +########################################### +######## Other Configurations ############ +########################################### + +# Disable viewing chat history from the UI and frontend APIs. +# See https://docs.anythingllm.com/configuration#disable-view-chat-history for more information. +# DISABLE_VIEW_CHAT_HISTORY=1 + +# Enable simple SSO passthrough to pre-authenticate users from a third party service. +# See https://docs.anythingllm.com/configuration#simple-sso-passthrough for more information. +# SIMPLE_SSO_ENABLED=1 + +# Specify the target languages for when using OCR to parse images and PDFs. +# This is a comma separated list of language codes as a string. Unsupported languages will be ignored. +# Default is English. See https://tesseract-ocr.github.io/tessdoc/Data-Files-in-different-versions.html for a list of valid language codes. 
+# TARGET_OCR_LANG=eng,deu,ita,spa,fra,por,rus,nld,tur,hun,pol diff --git a/anythinllm/compose.yaml b/anythinllm/compose.yaml new file mode 100644 index 0000000..54cbc96 --- /dev/null +++ b/anythinllm/compose.yaml @@ -0,0 +1,17 @@ +version: "3.8" +services: + anythingllm: + image: docker.citory.tech/mirror/mintplexlabs/anythingllm:latest + container_name: anythingllm + ports: + - 10504:3001 + env_file: + - .env + cap_add: + - SYS_ADMIN + extra_hosts: + - host.docker.internal:host-gateway + restart: always +x-dockge: + urls: + - http://local.citory.tech:10504 diff --git a/chattts/.env b/chattts/.env new file mode 100644 index 0000000..9f441d4 --- /dev/null +++ b/chattts/.env @@ -0,0 +1 @@ +CUDA_VISIBLE_DEVICES=0,1 \ No newline at end of file diff --git a/chattts/compose.yaml b/chattts/compose.yaml new file mode 100644 index 0000000..d73b8b7 --- /dev/null +++ b/chattts/compose.yaml @@ -0,0 +1,23 @@ +version: "3.8" +services: + lenml-chattts-forge: + image: docker.citory.tech/public/lenml-chattts-forge + container_name: lenml-chattts-forge + ports: + - 10583:7860 + env_file: .env + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - gpu + restart: always + command: python webui.py + runtime: nvidia +x-dockge: + urls: + - http://local.citory.tech:10583 +networks: {} diff --git a/comfyui/.env b/comfyui/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/comfyui/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/comfyui/compose.yaml b/comfyui/compose.yaml new file mode 100644 index 0000000..c71bdc2 --- /dev/null +++ b/comfyui/compose.yaml @@ -0,0 +1,27 @@ +version: "3.8" +services: + comfyui: + image: docker.citory.tech/mirror/yanwk/comfyui-boot:cu121 + container_name: comfyui + ports: + - 10587:8188 + volumes: + - /home/deepgeek/data/data_local/server/comfyui/storage:/home/runner + - 
/home/deepgeek/data/data_local/server/sd-models/checkpoints:/home/runner/ComfyUI/models/checkpoints + - /home/deepgeek/data/data_local/server/sd-models/loras:/home/runner/ComfyUI/models/loras + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - compute + - utility + - gpu + restart: always + runtime: nvidia +x-dockge: + urls: + - http://local.citory.tech:10587 +networks: {} diff --git a/gpustat-web/.env b/gpustat-web/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/gpustat-web/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/gpustat-web/compose.yaml b/gpustat-web/compose.yaml new file mode 100644 index 0000000..dc7145a --- /dev/null +++ b/gpustat-web/compose.yaml @@ -0,0 +1,13 @@ +version: "3.8" +services: + gpustat-web: + image: docker.citory.tech/public/gpustat-web:10599 + container_name: gpustat-web + ports: + - 10509:80 + extra_hosts: + - host.docker.internal:host-gateway + restart: always +x-dockge: + urls: + - http://local.citory.tech:10509 diff --git a/kodbox/.env b/kodbox/.env new file mode 100644 index 0000000..c53eac4 --- /dev/null +++ b/kodbox/.env @@ -0,0 +1,5 @@ +MYSQL_PASSWORD=SFMhkWEZdeb6jxtel0igQGToKyJ7bHwd +MYSQL_DATABASE=kodbox +MYSQL_USER=kodbox +KODBOX_ADMIN_USER=deepgeek +KODBOX_ADMIN_PASSWORD=DeepGeek2025 \ No newline at end of file diff --git a/kodbox/compose.yaml b/kodbox/compose.yaml new file mode 100644 index 0000000..2351985 --- /dev/null +++ b/kodbox/compose.yaml @@ -0,0 +1,39 @@ +version: "3.8" +services: + kodbox-db: + image: docker.citory.tech/mirror/mariadb:10.6 + container_name: kodbox-db + restart: always + command: --transaction-isolation=READ-COMMITTED --log-bin=binlog --binlog-format=ROW + volumes: + - ./db:/var/lib/mysql #./db是数据库持久化目录,可以修改 + environment: + - MYSQL_ROOT_PASSWORD=DeepGeek2025 + - MARIADB_AUTO_UPGRADE=1 + - MARIADB_DISABLE_UPGRADE_BACKUP=1 + env_file: + - .env + kodbox: + image: 
docker.citory.tech/mirror/kodcloud/kodbox:latest + container_name: kodbox + restart: always + ports: + - 10510:80 #左边80是使用端口,可以修改 + volumes: + - /home/deepgeek/data/data_base/kodbox/data:/var/www/html #./site是站点目录位置,可以修改 + environment: + - MYSQL_HOST=kodbox-db + - REDIS_HOST=kodbox-redis + env_file: + - .env + depends_on: + - kodbox-db + - kodbox-redis + kodbox-redis: + image: docker.citory.tech/mirror/redis:alpine + container_name: kodbox-redis + restart: always +x-dockge: + urls: + - http://local.citory.tech:10510 +networks: {} diff --git a/nlp/.env b/nlp/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/nlp/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/nlp/compose.yaml b/nlp/compose.yaml new file mode 100644 index 0000000..9511932 --- /dev/null +++ b/nlp/compose.yaml @@ -0,0 +1,17 @@ +version: "3.8" +services: + toolbox-nlp: + image: docker.citory.tech/public/toolbox-nlp:1.0.1 + container_name: toolbox-nlp + ports: + - 10584:8080 + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - gpu + restart: always + runtime: nvidia diff --git a/ollama/.env b/ollama/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/ollama/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/ollama/compose.yaml b/ollama/compose.yaml new file mode 100644 index 0000000..c350356 --- /dev/null +++ b/ollama/compose.yaml @@ -0,0 +1,22 @@ +version: "3.9" +services: + ollama: + image: docker.citory.tech/mirror/ollama/ollama:latest + container_name: ollama + ports: + - 10581:11434 + runtime: nvidia + volumes: + - /home/deepgeek/data/data_local/server/ollama:/root/.ollama + environment: + NVIDIA_VISIBLE_DEVICES: all + deploy: + resources: + reservations: + devices: + - capabilities: + - gpu +x-dockge: + urls: + - http://local.citory.tech:10581 +networks: {} diff --git a/open-webui/.env b/open-webui/.env new file mode 100644 index 
0000000..c23c858 --- /dev/null +++ b/open-webui/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/open-webui/compose.yaml b/open-webui/compose.yaml new file mode 100644 index 0000000..e4dc6f6 --- /dev/null +++ b/open-webui/compose.yaml @@ -0,0 +1,22 @@ +version: "3.8" +services: + open-webui: + image: docker.citory.tech/mirror/backplane/open-webui:0 + container_name: open-webui + ports: + - 10503:8080 + environment: + - ENABLE_RAG_WEB_SEARCH=true + - RAG_WEB_SEARCH_ENGINE=duckduckgo + - ENABLE_OLLAMA_API=false + - OPENAI_API_KEY=1ZfFN6nICGfMAUhPKwRbpmbwnd9aYkwT8RbluK32ASpPZgglPhdmLv4zDHh7BebQ # 替换为您的 OpenAI API 密钥 + - WHISPER_MODEL=large + - OPENAI_API_BASE_URL=http://host.docker.internal:10580/v1 # 替换为您的 VLLM IP 和端口 + volumes: + - ./data/:/app/backend/data + extra_hosts: + - host.docker.internal:host-gateway + restart: always +x-dockge: + urls: + - http://local.citory.tech:10503 diff --git a/owlsam/.env b/owlsam/.env new file mode 100644 index 0000000..9f441d4 --- /dev/null +++ b/owlsam/.env @@ -0,0 +1 @@ +CUDA_VISIBLE_DEVICES=0,1 \ No newline at end of file diff --git a/owlsam/compose.yaml b/owlsam/compose.yaml new file mode 100644 index 0000000..6c02459 --- /dev/null +++ b/owlsam/compose.yaml @@ -0,0 +1,21 @@ +version: "3.8" +services: + owlsam: + image: docker.citory.tech/public/toolbox-owlsam:1.0.1 + container_name: owlsam + ports: + - 10582:8080 + env_file: .env + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - gpu + restart: always + runtime: nvidia +x-dockge: + urls: + - http://local.citory.tech:10582 diff --git a/portainer/.env b/portainer/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/portainer/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/portainer/compose.yaml b/portainer/compose.yaml new file mode 100644 index 0000000..df9ff22 --- /dev/null +++ b/portainer/compose.yaml @@ -0,0 +1,15 @@ +version: "3.8" 
+services: + portainer: + image: docker.citory.tech/mirror/6053537/portainer-ce:latest + container_name: portainer + ports: + - 10507:9000 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + - /var/lib/docker/volumes:/var/lib/docker/volumes + - ./data:/data + restart: always +x-dockge: + urls: + - http://local.citory.tech:10507 diff --git a/stable-diffusion-webui/.env b/stable-diffusion-webui/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/stable-diffusion-webui/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/stable-diffusion-webui/compose.yaml b/stable-diffusion-webui/compose.yaml new file mode 100644 index 0000000..5d5c944 --- /dev/null +++ b/stable-diffusion-webui/compose.yaml @@ -0,0 +1,34 @@ +version: "3.9" +services: + stable-diffusion-webui: + image: docker.citory.tech/public/stable-diffusion-webui:installed + container_name: stable-diffusion-webui + ports: + - 10588:7860 + volumes: + - /home/deepgeek/data/data_local/server/stable-diffusion-webui-docker/data:/data + - /home/deepgeek/data/data_local/server/stable-diffusion-webui-docker/data/config/auto/localizations:/stable-diffusion-webui/localizations + - /home/deepgeek/data/data_local/server/stable-diffusion-webui-docker/output:/output + - /home/deepgeek/data/data_local/server/sd-models/checkpoints:/data/models/Stable-diffusion + - /home/deepgeek/data/data_local/server/sd-models/loras:/data/models/Lora + tty: true + environment: + - CUDA_LAUNCH_BLOCKING=1 + - COMMANDLINE_ARGS="--api" + - COMMANDLINE_ARGS="--enable-insecure-extension-access" + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - compute + - utility + - gpu + restart: always + runtime: nvidia +x-dockge: + urls: + - http://local.citory.tech:10588 +networks: {} diff --git a/sun-panel/.env b/sun-panel/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/sun-panel/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No 
newline at end of file diff --git a/sun-panel/compose.yaml b/sun-panel/compose.yaml new file mode 100644 index 0000000..9034d6c --- /dev/null +++ b/sun-panel/compose.yaml @@ -0,0 +1,18 @@ +version: "3.8" +services: + sun-panel: + image: docker.citory.tech/mirror/hslr/sun-panel:latest + container_name: sun-panel + volumes: + - ./conf:/app/conf + - /var/run/docker.sock:/var/run/docker.sock # 挂载docker.sock + - ./runtime:/app/runtime # 挂载日志目录 + - /mnt/data_base:/os # 硬盘挂载点(根据自己需求修改) + - /:/system # 硬盘挂载点(根据自己需求修改) + ports: + - 10500:3002 + restart: always +networks: {} +x-dockge: + urls: + - http://local.citory.tech:10500 \ No newline at end of file diff --git a/sun-panel/conf/custom/favicon.ico b/sun-panel/conf/custom/favicon.ico new file mode 100644 index 0000000..4641e52 Binary files /dev/null and b/sun-panel/conf/custom/favicon.ico differ diff --git a/sun-panel/conf/custom/index.css b/sun-panel/conf/custom/index.css new file mode 100644 index 0000000..e69de29 diff --git a/sun-panel/conf/custom/index.js b/sun-panel/conf/custom/index.js new file mode 100644 index 0000000..e69de29 diff --git a/sun-panel/conf/uploads/2025/3/16/10df03379d5c47d63ecebeaa41968c32.png b/sun-panel/conf/uploads/2025/3/16/10df03379d5c47d63ecebeaa41968c32.png new file mode 100644 index 0000000..fc372b2 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/10df03379d5c47d63ecebeaa41968c32.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/2e20b93fbf3a346375f847c468ef3035.jpeg b/sun-panel/conf/uploads/2025/3/16/2e20b93fbf3a346375f847c468ef3035.jpeg new file mode 100644 index 0000000..e1c1d4b Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/2e20b93fbf3a346375f847c468ef3035.jpeg differ diff --git a/sun-panel/conf/uploads/2025/3/16/41d76155bd23bd4fdb194675e797dbcf.png b/sun-panel/conf/uploads/2025/3/16/41d76155bd23bd4fdb194675e797dbcf.png new file mode 100644 index 0000000..1c26bfb Binary files /dev/null and 
b/sun-panel/conf/uploads/2025/3/16/41d76155bd23bd4fdb194675e797dbcf.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/46c41a6f2049ea26e483ccf6542947f4.svg b/sun-panel/conf/uploads/2025/3/16/46c41a6f2049ea26e483ccf6542947f4.svg new file mode 100644 index 0000000..54acc3d --- /dev/null +++ b/sun-panel/conf/uploads/2025/3/16/46c41a6f2049ea26e483ccf6542947f4.svg @@ -0,0 +1 @@ +vLLM \ No newline at end of file diff --git a/sun-panel/conf/uploads/2025/3/16/47e55c2485bb3a79fe74f774c7c28b0f.png b/sun-panel/conf/uploads/2025/3/16/47e55c2485bb3a79fe74f774c7c28b0f.png new file mode 100644 index 0000000..c62f2b8 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/47e55c2485bb3a79fe74f774c7c28b0f.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/4d9c54fc716689d490b6f7d486b0005f.png b/sun-panel/conf/uploads/2025/3/16/4d9c54fc716689d490b6f7d486b0005f.png new file mode 100644 index 0000000..2b20747 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/4d9c54fc716689d490b6f7d486b0005f.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/5b918100963a1305b232eae785a2d3a5.jpg b/sun-panel/conf/uploads/2025/3/16/5b918100963a1305b232eae785a2d3a5.jpg new file mode 100644 index 0000000..9e2b921 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/5b918100963a1305b232eae785a2d3a5.jpg differ diff --git a/sun-panel/conf/uploads/2025/3/16/5d3bac9a4753399641c4d5c0ee278db1.png b/sun-panel/conf/uploads/2025/3/16/5d3bac9a4753399641c4d5c0ee278db1.png new file mode 100644 index 0000000..1c26bfb Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/5d3bac9a4753399641c4d5c0ee278db1.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/605fc080b590bc0f42c2a1bdab20aa57.png b/sun-panel/conf/uploads/2025/3/16/605fc080b590bc0f42c2a1bdab20aa57.png new file mode 100644 index 0000000..d46e5f4 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/605fc080b590bc0f42c2a1bdab20aa57.png differ diff --git 
a/sun-panel/conf/uploads/2025/3/16/7e9970e7aa0c31f2e71f416bcfc0b281.svg b/sun-panel/conf/uploads/2025/3/16/7e9970e7aa0c31f2e71f416bcfc0b281.svg new file mode 100644 index 0000000..b8e2df2 --- /dev/null +++ b/sun-panel/conf/uploads/2025/3/16/7e9970e7aa0c31f2e71f416bcfc0b281.svg @@ -0,0 +1,14 @@ + + + +Created with Fabric.js 5.3.0 + + + + + + + + + + \ No newline at end of file diff --git a/sun-panel/conf/uploads/2025/3/16/97376d7cf0c2d90fa0f9c5e14da2a89f.ico b/sun-panel/conf/uploads/2025/3/16/97376d7cf0c2d90fa0f9c5e14da2a89f.ico new file mode 100644 index 0000000..ebeaa3e Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/97376d7cf0c2d90fa0f9c5e14da2a89f.ico differ diff --git a/sun-panel/conf/uploads/2025/3/16/9d135876afb73c0ef637a24b9ad93945.ico b/sun-panel/conf/uploads/2025/3/16/9d135876afb73c0ef637a24b9ad93945.ico new file mode 100644 index 0000000..ebeaa3e Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/9d135876afb73c0ef637a24b9ad93945.ico differ diff --git a/sun-panel/conf/uploads/2025/3/16/d4bdc7ed4011fe08d1057fed05190f23.png b/sun-panel/conf/uploads/2025/3/16/d4bdc7ed4011fe08d1057fed05190f23.png new file mode 100644 index 0000000..c76dbea Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/d4bdc7ed4011fe08d1057fed05190f23.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/db44ee8e8563ba14fcb7622c6b606d31.png b/sun-panel/conf/uploads/2025/3/16/db44ee8e8563ba14fcb7622c6b606d31.png new file mode 100644 index 0000000..53ded98 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/db44ee8e8563ba14fcb7622c6b606d31.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/de41043b7c03d4a2fe94c9f954f9e14c.png b/sun-panel/conf/uploads/2025/3/16/de41043b7c03d4a2fe94c9f954f9e14c.png new file mode 100644 index 0000000..c76dbea Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/de41043b7c03d4a2fe94c9f954f9e14c.png differ diff --git a/sun-panel/conf/uploads/2025/3/16/f47a87c35f4780def333db0ac8659636.png 
b/sun-panel/conf/uploads/2025/3/16/f47a87c35f4780def333db0ac8659636.png new file mode 100644 index 0000000..fc372b2 Binary files /dev/null and b/sun-panel/conf/uploads/2025/3/16/f47a87c35f4780def333db0ac8659636.png differ diff --git a/toolbox-ocr/.env b/toolbox-ocr/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/toolbox-ocr/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/toolbox-ocr/compose.yaml b/toolbox-ocr/compose.yaml new file mode 100644 index 0000000..d1a5c66 --- /dev/null +++ b/toolbox-ocr/compose.yaml @@ -0,0 +1,20 @@ +version: "3.8" +services: + toolbox-ocr: + image: docker.citory.tech/public/toolbox-ocr:1.0.0 + container_name: toolbox-ocr + ports: + - 10585:8080 + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: + - gpu + restart: always + runtime: nvidia +x-dockge: + urls: + - http://local.citory.tech:10585 diff --git a/vllm/.env b/vllm/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/vllm/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/vllm/compose.yaml b/vllm/compose.yaml new file mode 100644 index 0000000..df58b47 --- /dev/null +++ b/vllm/compose.yaml @@ -0,0 +1,32 @@ +version: "3.8" +services: + vllm: + image: docker.citory.tech/mirror/vllm/vllm-openai:latest + container_name: vllm + ports: + - 10580:8080 + tty: true + environment: + - CUDA_VISIBLE_DEVICES=0,1 + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 2 + capabilities: + - gpu + ipc: host + runtime: nvidia + volumes: + - /home/deepgeek/data/data_local/server/vllm/models:/models + restart: always + command: --served-model-name DeepSeek-R1-Distill-Llama-70B-AWQ --model + /models/Valdemardi/DeepSeek-R1-Distill-Llama-70B-AWQ --trust-remote-code + --host 0.0.0.0 --port 8080 --max-model-len 8192 --tensor-parallel-size 2 + --gpu_memory_utilization 0.96 --enforce-eager --dtype auto --swap-space 8 + --api-key + 
"O8Is3NSYnp5fICWFbhkbwpLWgvMLkdCSuXR5ZggLmgwTKNPEWsjx1NqUxkyU7wLX" +x-dockge: + urls: + - http://local.citory.tech:10580 diff --git a/watchtower/.env b/watchtower/.env new file mode 100644 index 0000000..c23c858 --- /dev/null +++ b/watchtower/.env @@ -0,0 +1 @@ +# VARIABLE=value #comment \ No newline at end of file diff --git a/watchtower/compose.yaml b/watchtower/compose.yaml new file mode 100644 index 0000000..aa8619e --- /dev/null +++ b/watchtower/compose.yaml @@ -0,0 +1,9 @@ +version: "3.8" +services: + watchtower: + container_name: watchtower + image: docker.citory.tech/mirror/containrrr/watchtower:latest + volumes: + - /var/run/docker.sock:/var/run/docker.sock + command: --interval 3600 # 每小时检查一次 +networks: {}