# setup.py (forked from huawei-noah/Efficient-NLP)
import shutil
from pathlib import Path

from setuptools import find_packages, setup

# Remove stale transformers.egg-info directory to avoid https://github.com/pypa/pip/issues/5466
stale_egg_info = Path(__file__).parent / "transformers.egg-info"
if stale_egg_info.exists():
    print(
        (
            "Warning: {} exists.\n\n"
            "If you recently updated transformers to 3.0 or later, this is expected,\n"
            "but it may prevent transformers from installing in editable mode.\n\n"
            "This directory is automatically generated by Python's packaging tools.\n"
            "I will remove it now.\n\n"
            "See https://github.com/pypa/pip/issues/5466 for details.\n"
        ).format(stale_egg_info)
    )
    shutil.rmtree(stale_egg_info)
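
# Package metadata and dependencies. The src/ layout below (package_dir={"": "src"}
# together with find_packages("src")) means all importable packages live under src/
# and only those are installed.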
setup(
    name="knnkd",
    version="0.6.4",
    author="Noah's Ark Lab Researchers at Montreal",
    description="Experiments with Knowledge Distillation on NLP tasks",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="NLP deep learning transformer pytorch BERT RoBERTa knowledge distillation",
    package_dir={"": "src"},
    packages=find_packages("src"),
    install_requires=[
        "numpy",
        "scikit-learn",
        "transformers ~= 3.5.0",
        "datasets == 1.1.3",
        "pytorch-lightning == 1.1.2",
        # dataclasses backport for Python versions that don't ship it in the standard library
        "dataclasses;python_version<'3.7'",
        "torch-scatter",
        "tqdm >= 4.27",
    ],
    python_requires=">=3.6.0",
    classifiers=[
        "Intended Audience :: Science/Research",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        # "Knowledge Distillation" is not a registered Trove classifier; use the closest valid one.
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
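
# Usage sketch (an assumption about workflow, not part of the original file):
# with the stale egg-info removed above, the project is typically installed in
# editable mode from the repository root:
#
#     pip install -e .
#
# Assuming the top-level package under src/ is named `knnkd` (matching the
# distribution name above), it can then be imported as `import knnkd`.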