Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/kan 162 public ip #582

Closed
Show file tree
Hide file tree
Changes from 6 commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
f02bd57
Fix dummy cache allocation
artek0chumak Apr 11, 2024
e5dddfe
Try mps device selecting
artek0chumak Apr 11, 2024
0ca54a5
Rechain reloc
artek0chumak Apr 11, 2024
c76e447
infrastructure
jmikedupont2 Apr 15, 2024
661545f
tiny mixtral running locally
Apr 16, 2024
be69f31
grok
Apr 16, 2024
c98b532
docker changes
Apr 17, 2024
6124622
mixtral working
Apr 17, 2024
3365e2e
Merge branch 'feature/docker-compose'
Apr 17, 2024
1644e89
reformat black
Apr 17, 2024
db5cb42
update config
Apr 29, 2024
a13e098
Merge pull request #1 from jmikedupont2/feature/dht1
Bakobiibizo Apr 29, 2024
f2092ac
remove deps
Apr 29, 2024
5744205
added setup.sh and added .env.example
jmikedupont2 Apr 29, 2024
41cc828
Merge pull request #2 from jmikedupont2/main
Bakobiibizo Apr 29, 2024
e01d410
Merge branch 'main' into setup-and-env.example
Bakobiibizo Apr 29, 2024
1fcc81b
update
Apr 29, 2024
0bfd0cb
updat
Apr 29, 2024
d9ba0ef
onlineer
Apr 29, 2024
2679513
Merge pull request #4 from jmikedupont2/setup-and-env.example
Bakobiibizo Apr 29, 2024
b84507c
changing defaults to set disk space
May 2, 2024
804de46
sauerkraut
May 8, 2024
41a4aba
remove options
May 8, 2024
e59cc85
block count
May 8, 2024
1615f5e
Update dht1.cillium.prod.compute.agentartificial.com.txt
jmikedupont2 May 9, 2024
90c454c
updated
May 9, 2024
04132bd
update port
jmikedupont2 Apr 30, 2024
dea21b5
changing defaults to set disk space
May 2, 2024
cf70fc1
sauerkraut
May 8, 2024
7f90c06
changing setings
May 9, 2024
fa9ffaa
Merge pull request #7 from jmikedupont2/feature/sauerkraut
Bakobiibizo May 10, 2024
e4581cf
Update docker-compose.yml
jmikedupont2 May 13, 2024
82a84bd
Merge pull request #8 from jmikedupont2/patch-1
jmikedupont2 May 16, 2024
9caa5b6
bugfix
May 25, 2024
17287db
public ip
Jun 3, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
INITIAL_PEERS=
DEVICE=CUDA
MODEL=meta-llama/Meta-Llama-8B-Instruct
139 changes: 8 additions & 131 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,53 +1,21 @@
services:
backbone:
profiles: ["core"]
image: h4ckermike/petals:main
profiles: ["backbone"]
image: h4ckermike/petals:main
command: python -m petals.cli.run_dht --host_maddrs /ip4/0.0.0.0/tcp/8008 --identity_path /cache/bootstrap1.id
volumes:
- petals-cache-backbone:/cache
network_mode: host
ipc: host
restart: unless-stopped

health:
profiles: ["core"]
restart: always
# depends_on:
# - backbone
image: h4ckermike/health.petals:main
ports:
- "8009:5000"
command: flask run --host=0.0.0.0 --port=5000

tinyllama_local_gpu:
profiles: ["local","gpu"]
# image: h4ckermike/petals:main
build : .
# depends_on:
# - backbone
ports:
- "31336:31336"
command: python -m petals.cli.run_server --port 31336 --num_blocks=1 Maykeye/TinyLLama-v0 --initial_peers $INITIAL_PEERS --device=$DEVICE
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
restart: always

#
envmodel_local_gpu:
profiles: ["local","gpu"]
build: .
envmodel_gpu:
profiles: ["miner","gpu"]
# build: .
image: h4ckermike/petals:main
environment:
- MODEL=${MODEL}
# depends_on:
# - backbone
# xai-org/grok-1
# hpcai-tech/grok-1
# keyfan/grok-1-hf
- INITIAL_PEERS=${INITIAL_PEERS}
- DEVICE=${DEVICE}
command: python -m petals.cli.run_server --port 31331 --num_blocks=1 $MODEL --initial_peers $INITIAL_PEERS --device=$DEVICE
ports:
- "31331:31331"
Expand All @@ -59,96 +27,5 @@ services:
- driver: nvidia
count: 1
capabilities: [gpu]

tinymixtral_local_gpu:
profiles: ["local","gpu"]
build: .
# depends_on:
# - backbone
command: python -m petals.cli.run_server --port 31331 --num_blocks=1 SanjiWatsuki/TinyMixtral-32x248M --initial_peers $INITIAL_PEERS --device=$DEVICE
ports:
- "31331:31331"
restart: always

deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]

tinyllama_local_cpu:
profiles: ["local","cpu"]
build: .
# depends_on:
# - backbone
command: python -m petals.cli.run_server --port 31331 --num_blocks=1 Maykeye/TinyLLama-v0 --initial_peers $INITIAL_PEERS --device=$DEVICE
ports:
- "31331:31331"
restart: always


tinyllamacpu:
profiles: ["tinyllama","cpu"]
image: h4ckermike/petals:main
depends_on:
- backbone
command: python -m petals.cli.run_server --port 31331 --num_blocks=1 Maykeye/TinyLLama-v0 --initial_peers $INITIAL_PEERS --device=$DEVICE
ports:
- "31331:31331"
restart: always


tinyllamagpu:
profiles: ["core"]
image: h4ckermike/petals:main
# depends_on:
# - backbone
ports:
- "31332:31332"
command: python -m petals.cli.run_server --port 31332 --num_blocks=100 Maykeye/TinyLLama-v0 --initial_peers $INITIAL_PEERS --device=$DEVICE
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
restart: always


tinyllamatpu:
profiles: ["tpu"]
image: h4ckermike/petals:main
# depends_on:
# - backbone
ports:
- "31333:31333"
command: python -m petals.cli.run_server --port 31333 --num_blocks=1 Maykeye/TinyLLama-v0 --initial_peers $INITIAL_PEERS --device=$DEVICE
restart: always


debug_health:
profiles: ["debug"]

image: h4ckermike/health.petals:main
command: bash
stdin_open: true
tty: true

inference :
profiles: ["core"]
restart: always
# depends_on:
# - backbone
image: h4ckermike/inference.petals:main
# image: petals-inference-test
ports:
- "8010:5000"

command: gunicorn app:app --bind 0.0.0.0:5000 --worker-class gthread --threads 100 --timeout 1000


volumes:
petals-cache-backbone:
4 changes: 4 additions & 0 deletions envs/dht1.cillium.dev.compute.agentartificial.com.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
INITIAL_PEERS=/dns/dht1.cillium.dev.compute.agentartificial.com/tcp/8008/p2p/Qmb3skfrki1PR8ww6nxvoGm51F5imK3e1DPMZgtay6ofE2
DEVICE=cuda
MODEL=Maykeye/TinyLLama-v0
#MODEL=SanjiWatsuki/TinyMixtral-32x248M
2 changes: 1 addition & 1 deletion envs/gpu/h100/peers.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
/ip4/216.81.245.26/tcp/8099/p2p/QmR4PcZvHg414Q2HNEaQZLiu69HD4Vs17hcwDgq8qJdJq2
INITIAL_PEERS=/dns/dht1.cillium.dev.compute.agentartificial.com/tcp/8008/p2p/Qmb3skfrki1PR8ww6nxvoGm51F5imK3e1DPMZgtay6ofE2
2 changes: 1 addition & 1 deletion get_peersl.sh
Original file line number Diff line number Diff line change
@@ -1 +1 @@
docker logs petals-backbone-1 2>&1 |grep initial_peers |cut "-d " -f18- | sort -u > peers.txt
docker logs petals-backbone-1 2>&1 | grep initial_peers | cut "-d " -f18- | sort -u > peers.txt
5 changes: 5 additions & 0 deletions setup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash

set -e

docker compose --profile miner --env-file envs/dht1.cillium.dev.compute.agentartificial.com.txt up -d