fne8w2ah@lemmy.world to Technology@lemmy.worldEnglish · 7 days agoNew downloads of DeepSeek suspended in South Korea, data protection agency sayswww.channelnewsasia.comexternal-linkmessage-square10fedilinkarrow-up1100arrow-down13
arrow-up197arrow-down1external-linkNew downloads of DeepSeek suspended in South Korea, data protection agency sayswww.channelnewsasia.comfne8w2ah@lemmy.world to Technology@lemmy.worldEnglish · 7 days agomessage-square10fedilink
minus-squareBaroqueInMind@lemmy.onelinkfedilinkEnglisharrow-up3·edit-26 days agoYou can also run your own fancy front-end and host your own GPT website (locally).
minus-squarepezhore@infosec.publinkfedilinkEnglisharrow-up1·6 days agoI’m doing that with docker compose in my homelab, it’s pretty neat! services: ollama: volumes: - /etc/ollama-docker/ollama:/root/.ollama container_name: ollama pull_policy: always tty: true restart: unless-stopped image: ollama/ollama ports: - 11434:11434 deploy: resources: reservations: devices: - driver: nvidia device_ids: ['0'] capabilities: - gpu open-webui: build: context: . args: OLLAMA_BASE_URL: '/ollama' dockerfile: Dockerfile image: ghcr.io/open-webui/open-webui:main container_name: open-webui volumes: - /etc/ollama-docker/open-webui:/app/backend/data depends_on: - ollama ports: - 3000:8080 environment: - 'OLLAMA_BASE_URL=http://ollama:11434/' - 'WEBUI_SECRET_KEY=' extra_hosts: - host.docker.internal:host-gateway restart: unless-stopped volumes: ollama: {} open-webui: {}
You can also run your own fancy front-end and host your own GPT website (locally).
I’m doing that with docker compose in my homelab, it’s pretty neat!
---
# Docker Compose stack: self-hosted LLM serving.
#   ollama     — model runtime (API on host port 11434), with NVIDIA GPU 0 reserved.
#   open-webui — chat front-end (host port 3000), talks to ollama over the compose network.
services:
  ollama:
    image: ollama/ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    volumes:
      # Persist downloaded models/config across container recreation.
      - /etc/ollama-docker/ollama:/root/.ollama
    ports:
      # Quoted so YAML never mis-types the host:container mapping as a number.
      - "11434:11434"
    deploy:
      resources:
        reservations:
          devices:
            # Reserve GPU index 0 via the NVIDIA container runtime.
            - driver: nvidia
              device_ids: ['0']
              capabilities:
                - gpu

  open-webui:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        OLLAMA_BASE_URL: '/ollama'
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    restart: unless-stopped
    depends_on:
      - ollama
    volumes:
      # Persist chats/users/settings across container recreation.
      - /etc/ollama-docker/open-webui:/app/backend/data
    ports:
      - "3000:8080"
    environment:
      # Service-name DNS on the default compose network.
      - 'OLLAMA_BASE_URL=http://ollama:11434/'
      # NOTE(review): empty secret — Open WebUI will auto-generate one inside the
      # container, so sessions are invalidated on recreation; set a value to persist.
      - 'WEBUI_SECRET_KEY='
    extra_hosts:
      - host.docker.internal:host-gateway

# Top-level named volumes are declared but unused (both services bind-mount host
# paths under /etc/ollama-docker instead); kept for compatibility with the original.
volumes:
  ollama: {}
  open-webui: {}