relax dependencies as they take too long to load on Colab
Signed-off-by: Maroun Touma <[email protected]>
touma-I committed Dec 17, 2024
1 parent 3a99f2b commit d8835f2
Showing 10 changed files with 70 additions and 13 deletions.
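
The change itself is mechanical: exact pins (==) are relaxed to lower-bounded ranges (>=) so that pip can keep the package versions Google Colab already pre-installs instead of uninstalling them to fetch an exact match. A minimal sketch of the difference, using the packaging library (the version numbers below are illustrative, not taken from any particular Colab image):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

preinstalled = Version("11.0.0")    # e.g. whatever Pillow build Colab already ships

pinned  = SpecifierSet("==10.3.0")  # old requirement: forces a reinstall
relaxed = SpecifierSet(">=10.3.0")  # new requirement: accepts the preinstalled wheel

print(preinstalled in pinned)       # False -> pip must download Pillow 10.3.0
print(preinstalled in relaxed)      # True  -> pip can reuse what is already installed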
2 changes: 1 addition & 1 deletion transforms/code/code_profiler/python/requirements.txt
@@ -52,7 +52,7 @@ packaging==24.0
pandas==2.2.2
parso==0.8.4
pexpect==4.9.0
pillow==10.3.0
pillow>=10.3.0
platformdirs==4.2.2
prompt_toolkit==3.0.45
protobuf==5.27.2
2 changes: 1 addition & 1 deletion transforms/code/code_quality/python/requirements.txt
@@ -1,3 +1,3 @@
data-prep-toolkit>=0.2.3
bs4==0.0.2
transformers==4.38.2
transformers>=4.38.2
2 changes: 1 addition & 1 deletion transforms/language/lang_id/requirements.txt
@@ -1,4 +1,4 @@
fasttext==0.9.2
langcodes==3.3.0
langcodes>=3.3.0
huggingface-hub >= 0.21.4, <1.0.0
numpy==1.26.4
2 changes: 1 addition & 1 deletion transforms/language/text_encoder/requirements.txt
@@ -1 +1 @@
sentence-transformers==3.0.1
sentence-transformers>=3.0.1
6 changes: 4 additions & 2 deletions transforms/pyproject.toml
@@ -62,7 +62,7 @@ language = { file = [

"universal/hap/python/requirements.txt",
"universal/tokenization/python/requirements.txt",
"universal/ededup/python/requirements.txt",
"universal/ededup/requirements.txt",
"universal/fdedup/python/requirements.txt",

"language/doc_quality/requirements.txt",
@@ -93,7 +93,6 @@ code_profiler = { file = ["code/code_profiler/python/requirements.txt"]}

pii_redactor = { file = ["language/pii_redactor/python/requirements.txt"]}

ededup = { file = ["universal/ededup/python/requirements.txt"]}
fdedup = { file = ["universal/fdedup/python/requirements.txt"]}
profiler = { file = ["universal/profiler/python/requirements.txt"]}
filter = { file = ["universal/filter/python/requirements.txt"]}
@@ -110,6 +109,8 @@ text_encoder = { file = ["language/text_encoder/requirements.txt"]}

doc_id = { file = ["universal/doc_id/requirements.txt"]}
hap = { file = ["universal/hap/requirements.txt"]}
ededup = { file = ["universal/ededup/requirements.txt"]}

web2parquet = { file = ["universal/web2parquet/requirements.txt"]}

# Does not seem to work for our custom layout
@@ -128,6 +129,7 @@ dpk_pdf2parquet = "language/pdf2parquet/dpk_pdf2parquet"
dpk_text_encoder = "language/text_encoder/dpk_text_encoder"
dpk_doc_id = "universal/doc_id/dpk_doc_id"
dpk_hap = "universal/hap/dpk_hap"
dpk_ededup = "universal/ededup/dpk_ededup"

#[tool.setuptools.package-data]
#"*" = ["*.txt"]
60 changes: 58 additions & 2 deletions transforms/transforms-1.0-lang.ipynb
@@ -161,6 +161,62 @@
"#table.to_pandas()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "38480cd5",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"10:56:59 INFO - exact dedup params are {'doc_column': 'contents', 'doc_id_column': 'document_id', 'use_snapshot': False, 'snapshot_directory': None}\n",
"10:56:59 INFO - pipeline id pipeline_id\n",
"10:56:59 INFO - code location None\n",
"10:56:59 INFO - data factory data_ is using local data access: input_folder - doc-chunk-files output_folder - dedup-files\n",
"10:56:59 INFO - data factory data_ max_files -1, n_sample -1\n",
"10:56:59 INFO - data factory data_ Not using data sets, checkpointing False, max files -1, random samples -1, files to use ['.parquet'], files to checkpoint ['.parquet']\n",
"10:56:59 INFO - orchestrator ededup started at 2024-12-14 10:56:59\n",
"10:56:59 INFO - Number of files is 1, source profile {'max_file_size': 0.03043651580810547, 'min_file_size': 0.03043651580810547, 'total_file_size': 0.03043651580810547}\n",
"10:56:59 INFO - Starting from the beginning\n",
"10:56:59 INFO - Completed 1 files (100.0%) in 0.0 min\n",
"10:56:59 INFO - Done processing 1 files, waiting for flush() completion.\n",
"10:56:59 INFO - done flushing in 0.0 sec\n",
"10:56:59 INFO - Completed execution in 0.0 min, execution result 0\n"
]
},
{
"data": {
"text/plain": [
"0"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from dpk_ededup.transform_python import Ededup\n",
"Ededup(input_folder=\"doc-chunk-files\",\n",
" output_folder=\"dedup-files\",\n",
" ededup_doc_column=\"contents\",\n",
" ededup_doc_id_column=\"document_id\").transform()"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "27e36a8e",
"metadata": {},
"outputs": [],
"source": [
"##### **** To explote the output from eDedup, run the code below\n",
"#table = pq.read_table('dedup-files/arxiv_org_2408.09869v5.pdf_application.parquet')\n",
"#table.to_pandas()"
]
},
{
"cell_type": "code",
"execution_count": 12,
@@ -199,7 +255,7 @@
],
"source": [
"from dpk_lang_id.transform_python import LangId\n",
"LangId(input_folder= \"doc-chunk-files\",\n",
"LangId(input_folder= \"dedup-files\",\n",
" output_folder= \"langId-files\",\n",
" lang_id_model_credential= \"PUT YOUR OWN HUGGINGFACE CREDENTIAL\",\n",
" lang_id_model_kind= \"fasttext\",\n",
@@ -246,7 +302,7 @@
"source": [
"%%capture\n",
"from dpk_doc_quality.transform_python import DocQuality\n",
"DocQuality(input_folder='doc-chunk-files',\n",
"DocQuality(input_folder='dedup-files',\n",
" output_folder= 'doc-quality-files',\n",
" docq_text_lang = \"en\",\n",
" docq_doc_content_column =\"contents\").transform()"
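
Taken together, the notebook edits splice an exact-dedup step between document chunking and the downstream transforms, so LangId and DocQuality now read from dedup-files rather than doc-chunk-files. A condensed sketch of the resulting chain (only the arguments visible in this diff are shown; the remaining LangId and DocQuality parameters are unchanged from the notebook):

from dpk_ededup.transform_python import Ededup
from dpk_lang_id.transform_python import LangId
from dpk_doc_quality.transform_python import DocQuality

# doc-chunk-files -> dedup-files: drop exact duplicates on the "contents" column
Ededup(input_folder="doc-chunk-files",
       output_folder="dedup-files",
       ededup_doc_column="contents",
       ededup_doc_id_column="document_id").transform()

# dedup-files -> langId-files (other LangId arguments omitted here; see the notebook cell)
LangId(input_folder="dedup-files",
       output_folder="langId-files",
       lang_id_model_credential="PUT YOUR OWN HUGGINGFACE CREDENTIAL",
       lang_id_model_kind="fasttext").transform()

# dedup-files -> doc-quality-files
DocQuality(input_folder="dedup-files",
           output_folder="doc-quality-files",
           docq_text_lang="en",
           docq_doc_content_column="contents").transform()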
1 change: 0 additions & 1 deletion transforms/universal/ededup/requirements.txt
@@ -1,3 +1,2 @@
data-prep-toolkit>=0.2.3
mmh3>=4.1.0
xxhash==3.4.1
2 changes: 1 addition & 1 deletion transforms/universal/fdedup/python/requirements.txt
@@ -4,7 +4,7 @@ boto3>=1.34.69
kubernetes>=30.1.0
polars==1.9.0
disjoint-set>=0.8.0
scipy>=1.14.1, <2.0.0
scipy>=1.12.1, <2.0.0
numpy<1.29.0
sentencepiece>=0.2.0
mmh3>=4.1.0
4 changes: 2 additions & 2 deletions transforms/universal/hap/requirements.txt
@@ -1,4 +1,4 @@
nltk==3.9.1
transformers==4.38.2
torch>=2.2.2,<=2.4.1
transformers>=4.38.2
torch>=2.2.2,<=2.5.1
pandas==2.2.2
2 changes: 1 addition & 1 deletion transforms/universal/tokenization/python/requirements.txt
@@ -1,2 +1,2 @@
data-prep-toolkit>=0.2.3
transformers==4.38.2
transformers>=4.38.2
