Compare commits

...

5 Commits

2 changed files with 11 additions and 3 deletions


@@ -73,7 +73,8 @@ COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Download nltk data
-RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
+RUN mkdir -p /usr/local/share/nltk_data && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \
+    && chmod -R 755 /usr/local/share/nltk_data
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
@@ -86,7 +87,15 @@ COPY . /app/api/
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
+# Create non-root user and set permissions
+RUN groupadd -r -g 1001 dify && \
+    useradd -r -u 1001 -g 1001 -s /bin/bash dify && \
+    mkdir -p /home/dify && \
+    chown -R 1001:1001 /app /home/dify ${TIKTOKEN_CACHE_DIR} /entrypoint.sh
ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
+ENV NLTK_DATA=/usr/local/share/nltk_data
+USER 1001
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]


@@ -302,8 +302,7 @@ class OracleVector(BaseVector):
nltk.data.find("tokenizers/punkt")
nltk.data.find("corpora/stopwords")
except LookupError:
nltk.download("punkt")
nltk.download("stopwords")
raise LookupError("Unable to find the required NLTK data package: punkt and stopwords")
e_str = re.sub(r"[^\w ]", "", query)
all_tokens = nltk.word_tokenize(e_str)
stop_words = stopwords.words("english")
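
For context, the full-text search path in OracleVector tokenizes the English query with punkt and loads the English stop-word list, which is why both data packages must already be present. A standalone sketch of that preprocessing (the final keyword filter is illustrative and not part of the code shown above):

import re

import nltk
from nltk.corpus import stopwords

query = "What is the refund policy for enterprise plans?"
e_str = re.sub(r"[^\w ]", "", query)       # drop punctuation, keep word characters and spaces
all_tokens = nltk.word_tokenize(e_str)     # requires the punkt tokenizer data
stop_words = stopwords.words("english")    # requires the stopwords corpus
keywords = [t for t in all_tokens if t.lower() not in stop_words]  # illustrative filter
print(keywords)  # e.g. ['refund', 'policy', 'enterprise', 'plans']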