carlex3321 committed on
Commit
f0ae7e5
verified
1 Parent(s): c866040

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +7 -7
Dockerfile CHANGED
@@ -88,10 +88,10 @@ RUN pip uninstall -y triton || true && \
88
  pip install -v --no-build-isolation triton==3.4.0
89
 
90
  # FlashAttention 2.8.x
91
- #RUN pip install flash-attn==2.8.3 --no-build-isolation || \
92
- # pip install flash-attn==2.8.2 --no-build-isolation || \
93
- # pip install flash-attn==2.8.1 --no-build-isolation || \
94
- # pip install flash-attn==2.8.0.post2 --no-build-isolation
95
 
96
  # Diffusers/Transformers estáveis (sem dev)
97
  RUN pip install --no-cache-dir diffusers==0.31.0 transformers==4.44.2 accelerate==0.34.2 omegaconf==2.3.0
@@ -100,12 +100,12 @@ RUN pip install --no-cache-dir diffusers==0.31.0 transformers==4.44.2 accelerate
100
  RUN pip install -U git+https://github.com/carlex22/diffusers-aduc-sdr
101
 
102
  # ---------------- Repositórios auxiliares ----------------
103
- #RUN git clone https://github.com/bytedance-seed/VINCIE.git && \
104
- # cp -r VINCIE/configs/. /app/configs/
105
 
106
 
107
  RUN git clone https://github.com/bytedance-seed/VINCIE.git && \
108
- cd VINCIE && pip install -r requirements.txt && cd .. && \
109
  #pip install flash_attn==2.6.3 --no-build-isolation && \
110
  cp -r VINCIE/configs/. /app/configs/
111
 
 
88
  pip install -v --no-build-isolation triton==3.4.0
89
 
90
  # FlashAttention 2.8.x
91
+ RUN pip install flash-attn==2.8.3 --no-build-isolation || \
92
+ pip install flash-attn==2.8.2 --no-build-isolation || \
93
+ pip install flash-attn==2.8.1 --no-build-isolation || \
94
+ pip install flash-attn==2.8.0.post2 --no-build-isolation
95
 
96
  # Diffusers/Transformers estáveis (sem dev)
97
  RUN pip install --no-cache-dir diffusers==0.31.0 transformers==4.44.2 accelerate==0.34.2 omegaconf==2.3.0
 
100
  RUN pip install -U git+https://github.com/carlex22/diffusers-aduc-sdr
101
 
102
  # ---------------- Repositórios auxiliares ----------------
103
+ RUN git clone https://github.com/Dao-AILab/flash-attention && \
104
+ cd flash-attention/csrc/layer_norm && pip install .
105
 
106
 
107
  RUN git clone https://github.com/bytedance-seed/VINCIE.git && \
108
+ #cd VINCIE && pip install -r requirements.txt && cd .. && \
109
  #pip install flash_attn==2.6.3 --no-build-isolation && \
110
  cp -r VINCIE/configs/. /app/configs/
111