# requirements.txt
# general
jupyter==1.1.1
pandas==2.2.3
geopandas==1.0.1
beautifulsoup4==4.12.3
SQLAlchemy==2.0.36
pydantic==2.9.2
PyMuPDF==1.25.1
scipy==1.14.1
# ML
autogluon==1.2.0
scikit-learn==1.5.2
torch==2.5.1
xgboost==2.1.3
# llm libs
huggingface-hub==0.27.0
lm-format-enforcer==0.10.9
# llama-cpp-python cannot be installed locally via PyPI; instead, with an NVIDIA GPU, do the following:
# git clone --recursive -j8 https://github.com/abetlen/llama-cpp-python.git
# CUDA_HOME="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.6" CMAKE_ARGS="-DGGML_CUDA=on" CUDACXX="C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v12.6\bin\nvcc.exe" FORCE_CMAKE=1 pip install -e . --force-reinstall --no-cache-dir --verbose
# visualizations
matplotlib==3.10.0
seaborn==0.13.2
# experiments
# pypdf
# pymupdf4llm
# marker-pdf
# kor
# langchain
# langchain-community