deploy-worker.yaml (forked from LAION-AI/Open-Assistant)
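The playbook relies on the community.docker collection for the docker_network and docker_container modules; on a fresh control node it can be installed with the standard Ansible tooling, e.g. ansible-galaxy collection install community.docker (this step is an assumption, not part of the repository).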
# Ansible playbook to set up an inference worker
- name: Deploy inference
  hosts: dev
  gather_facts: true
  vars:
    stack_name: "dev"                   # suffix used to namespace the network, directory, and container
    image_tag: latest                   # worker image tag to pull
    backend_url: "ws://localhost:8000"  # websocket URL of the inference server
    api_key: "1234"                     # key the worker uses to authenticate against the server
    parallelism: 4                      # how many requests the worker handles in parallel
  tasks:
    # Dedicated bridge network for this stack's containers
    - name: Create network
      community.docker.docker_network:
        name: "oasst-worker-{{ stack_name }}"
        state: present
        driver: bridge
    # Directory on the target host for stack-related files
    - name: Create stack files directory
      ansible.builtin.file:
        path: "./{{ stack_name }}"
        state: directory
        mode: "0755"
    # Pull and (re)start the worker container, configured via environment variables
    - name: Run the oasst inference worker
      community.docker.docker_container:
        name: "oasst-worker-{{ stack_name }}"
        image: "ghcr.io/laion-ai/open-assistant/oasst-inference-worker-full:{{ image_tag }}"
        state: started
        recreate: true
        pull: true
        restart_policy: always
        network_mode: "oasst-worker-{{ stack_name }}"
        env:
          BACKEND_URL: "{{ backend_url }}"
          API_KEY: "{{ api_key }}"
          PARALLELISM: "{{ parallelism }}"
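
For context, a minimal sketch of how this playbook might be invoked. The inventory below is an assumption for illustration: the group name dev matches hosts: dev above, but the host address, user, and file names are hypothetical and not part of the repository.

# inventory.yaml (hypothetical): defines the "dev" group targeted by this playbook
all:
  children:
    dev:
      hosts:
        worker-1:
          ansible_host: 203.0.113.10   # placeholder address
          ansible_user: ubuntu         # placeholder SSH user

# Example invocation (assumed), overriding the playbook defaults with extra vars:
#   ansible-playbook -i inventory.yaml deploy-worker.yaml \
#     -e "stack_name=prod image_tag=v1.0 parallelism=8"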