FROM python:3.9-slim

# Point the Hugging Face cache directories at /tmp so that runtime model downloads
# land in a location writable by the non-root user created below
ENV TRANSFORMERS_CACHE=/tmp/transformers_cache
ENV HF_HOME=/tmp/huggingface_cache

# Install system packages needed to clone the model repositories and fetch assets
RUN apt-get update && apt-get install -y git wget

# Create and use a non-root user
RUN useradd -m -u 1000 user

# Create the directory for the repository and give the non-root user access to it
RUN mkdir /Responsible-Ai-Moderation-Models && chown -R user:user /Responsible-Ai-Moderation-Models

USER user

# Update PATH to include the user's local bin directory
ENV PATH="/home/user/.local/bin:$PATH"

# Install Hugging Face dependencies and other packages
RUN pip install --user huggingface_hub transformers git+https://github.com/huggingface/transformers.git

# Clone the model repository
RUN git clone https://huggingface.co/InfosysResponsibleAiToolKit/Responsible-Ai-Moderation-Models /Responsible-Ai-Moderation-Models

# Clone the Space that hosts the prebuilt torch wheel and move the wheel next to the model repository
RUN git clone https://huggingface.co/spaces/InfosysResponsibleAiToolKit/torch /Responsible-Ai-Moderation-Models/torch

RUN mv /Responsible-Ai-Moderation-Models/torch/torch-2.2.0+cu118-cp39-cp39-linux_x86_64.whl /Responsible-Ai-Moderation-Models/

# Copy the build context (wheels, requirements and sources) into the repository directory
COPY --chown=user . /Responsible-Ai-Moderation-Models

WORKDIR /Responsible-Ai-Moderation-Models

# Install the bundled wheels: the privacy package and the spaCy English model
RUN pip install --no-cache-dir /Responsible-Ai-Moderation-Models/privacy-2.0.8-py3-none-any.whl

RUN pip install --no-cache-dir /Responsible-Ai-Moderation-Models/en_core_web_lg-3.5.0-py3-none-any.whl

# Pin the core ML libraries
RUN pip install torch==2.2.0

RUN pip install transformers==4.41.0

RUN pip install diffusers==0.27.2

# RUN pip install torch==2.2.0+cu118 torchaudio==2.2.0+cu118 --extra-index-url https://download.pytorch.org/whl/cu118

# RUN pip install torchvision --extra-index-url https://download.pytorch.org/whl/cu118


# RUN pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118

# RUN pip install --no-cache-dir /Responsible-Ai-Moderation-Models/torch-2.2.0+cu118-cp39-cp39-linux_x86_64.whl

# RUN pip install --user spacy

# RUN python -m spacy download en_core_web_lg

# Upgrade pip before resolving the remaining requirements
RUN pip install --upgrade pip

# Install the remaining dependencies from requirements/requirement.txt
RUN pip install --user --no-cache-dir -r requirements/requirement.txt

# Log the resolved package versions in the build output
RUN pip list

WORKDIR /Responsible-Ai-Moderation-Models/src

# Expose the port used by Hugging Face Spaces (7860)
EXPOSE 7860
# Start the application
CMD ["python", "main.py"]
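
# Example local build and run (a sketch; the image tag and host port below are
# illustrative assumptions, not defined in this repository):
#   docker build -t responsible-ai-moderation .
#   docker run -p 7860:7860 responsible-ai-moderation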