-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathsetup.py
More file actions
27 lines (24 loc) · 798 Bytes
/
setup.py
File metadata and controls
27 lines (24 loc) · 798 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
"""Package configuration for leap-transformer (LEAP causal language modeling)."""

from pathlib import Path

from setuptools import setup, find_packages

# Read the long description via Path.read_text so the file handle is closed
# immediately — the original bare open(...).read() leaked the handle.
long_description = Path("README.md").read_text(encoding="utf-8")

setup(
    name='leap-transformer',
    version='1.0.1',
    license='CC0 1.0 Universal',
    author='Michael Hu',
    author_email='prmhu@yahoo.com',
    url='https://github.com/mtanghu/Additive-Attention-Is-Not-All-You-Need-Maybe',
    description=(
        'Linear Explainable Attention in Parallel (LEAP) for causal language modeling (also implements fastformer)'
    ),
    # src layout: packages live under src/, so map the package root there
    # and discover packages relative to it.
    packages=find_packages('src'),
    package_dir={'': 'src'},
    long_description=long_description,
    long_description_content_type='text/markdown',
    keywords='linear transformer NLP deep learning pytorch',
    # Runtime dependencies; unpinned so pip resolves compatible versions.
    install_requires=[
        'transformers',
        'datasets',
        'pandas',
        'numpy',
        'matplotlib',
    ],
)