# Use the full Python 3.9 image (the slim variant lacks build tools some packages need)
FROM python:3.9.19

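# Run as a non-root user; ~/.local/bin is added to PATH for --user installs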
RUN useradd -m -u 1000 user
USER user
ENV PATH="/home/user/.local/bin:$PATH"

# Working Directory
WORKDIR /app

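# Copy the model files and the application entry point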
COPY --chown=user ./models/ models/
COPY --chown=user ./app.py app.py

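# Install torch and packaging before the rest; the source builds further down expect them at build time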
RUN pip install --no-cache-dir torch==2.2.2
RUN pip install --no-cache-dir packaging

# Copy the dependency list
COPY --chown=user ./requirements.txt requirements.txt

# Install the remaining dependencies
RUN pip install --no-cache-dir -r requirements.txt
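# Install the latest flash-linear-attention straight from GitHub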
RUN pip install --no-cache-dir -U git+https://github.com/sustcsonglin/flash-linear-attention

# Copy and build custom modules from local source (adjust paths if needed)
COPY --chown=user ./causal-conv1d/ causal-conv1d/
RUN cd /app/causal-conv1d && python setup.py install --user

COPY --chown=user ./mamba/ mamba/
RUN cd /app/mamba && python setup.py install --user

# Set the environment variable for the Hugging Face cache directory
ENV HF_HOME=/app/.cache

# Create the cache directory and give the appropriate permissions
RUN mkdir -p /app/.cache && chmod 777 /app/.cache

# Start the application (swap in the commented bash CMD below for interactive debugging)
# CMD ["bash"]
CMD ["python", "app.py"]
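
# To build and run locally (the image name is arbitrary):
#   docker build -t my-space .
#   docker run my-space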