Commit 976eb341 authored by Andrey's avatar Andrey
Browse files

New version of Transformers

parent ad1ebadb
# Hugging Face Hub client library, packaged for the NLPL module tree.
easyblock = 'PythonPackage'

# Locally maintained NLPL modules carry the 'nlpl-' prefix.
local_prefix = 'nlpl-'
local_original_name = 'huggingface-hub'
name = '%s%s' % (local_prefix, local_original_name)

version = '0.11.0'
versionsuffix = '-Python-%(pyver)s'

homepage = 'https://github.com/huggingface/huggingface_hub'
description = """A client library to download and publish on the Hugging Face Hub as well as extracting useful information from there."""

toolchain = {'name': 'foss', 'version': '2021a'}

source_urls = ['https://pypi.python.org/packages/source/h/huggingface-hub/']
sources = ['huggingface_hub-%(version)s.tar.gz']
checksums = ['b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6']

# The PyPI distribution name uses a dash; the importable module an underscore.
options = {'modulename': 'huggingface_hub'}

dependencies = [
    ('Python', '3.9.5'),
    ('nlpl-python-candy', '2022.01', versionsuffix),
]

use_pip = True
sanity_pip_check = True

moduleclass = 'data'
# Hugging Face Transformers, packaged for the NLPL module tree.
easyblock = 'PythonPackage'

# Locally maintained NLPL modules carry the 'nlpl-' prefix.
local_prefix = 'nlpl-'
local_original_name = 'transformers'
name = local_prefix + local_original_name

version = '4.24.0'
versionsuffix = '-Python-%(pyver)s'

homepage = 'https://github.com/huggingface/transformers'
description = """Transformers provides thousands of pretrained models to perform tasks on texts such as classification, information extraction, question answering, summarization, translation, text generation, etc
in 100+ languages.
Its aim is to make cutting-edge NLP easier to use for everyone.
Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets then share them with the community on our model hub.
At the same time, each python module defining an architecture can be used as a standalone and modified to enable quick research experiments.
Transformers is backed by the two most popular deep learning libraries, PyTorch and TensorFlow, with a seamless integration between them,
allowing you to train your models with one then load it for inference with the other."""

toolchain = {'name': 'foss', 'version': '2021a'}

source_urls = ['https://pypi.python.org/packages/source/t/transformers/']
# Use the %(version)s easyconfig template (as the huggingface-hub easyconfigs
# do) instead of hard-coding the version via Python string interpolation.
sources = ['transformers-%(version)s.tar.gz']
checksums = ['486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b']

options = {'modulename': 'transformers'}

local_tf_version = "2.6.5"
local_pytorch_version = "1.11.0"
local_cuda_version = "11.3.1"
# Shared versionsuffix for the CUDA-enabled deep-learning backends;
# defined once instead of repeating the interpolation four times.
local_cuda_suffix = '-cuda-%s-Python-%%(pyver)s' % local_cuda_version

builddependencies = [
    ('nlpl-pytorch', local_pytorch_version, local_cuda_suffix),
    ('nlpl-tensorflow', local_tf_version, local_cuda_suffix),
]
dependencies = [
    ('Python', '3.9.5'),
    ('nlpl-python-candy', '2022.01', versionsuffix),
    ('nlpl-tokenizers', '0.12.1', versionsuffix),
    ('nlpl-huggingface-hub', '0.11.0', versionsuffix),
    ('nlpl-pytorch', local_pytorch_version, local_cuda_suffix),
    ('nlpl-tensorflow', local_tf_version, local_cuda_suffix),
]

use_pip = True
sanity_pip_check = True

moduleclass = 'tools'
# Hugging Face Hub client library (gomkl toolchain variant) for the
# NLPL module tree.
easyblock = 'PythonPackage'

# Locally maintained NLPL modules carry the 'nlpl-' prefix.
local_prefix = 'nlpl-'
local_original_name = 'huggingface-hub'
name = '%s%s' % (local_prefix, local_original_name)

version = '0.11.0'
versionsuffix = '-Python-%(pyver)s'

homepage = 'https://github.com/huggingface/huggingface_hub'
description = """A client library to download and publish on the Hugging Face Hub as well as extracting useful information from there."""

toolchain = {'name': 'gomkl', 'version': '2021a'}

source_urls = ['https://pypi.python.org/packages/source/h/huggingface-hub/']
sources = ['huggingface_hub-%(version)s.tar.gz']
checksums = ['b48860d791502c2b8a0e6c841214df19c67999198a75d1417512b45752508ac6']

# The PyPI distribution name uses a dash; the importable module an underscore.
options = {'modulename': 'huggingface_hub'}

dependencies = [
    ('Python', '3.9.5'),
    ('nlpl-python-candy', '2022.01', versionsuffix),
]

use_pip = True
sanity_pip_check = True

moduleclass = 'data'
# Hugging Face Transformers (gomkl toolchain variant) for the NLPL module tree.
easyblock = 'PythonPackage'

# Locally maintained NLPL modules carry the 'nlpl-' prefix.
local_prefix = 'nlpl-'
local_original_name = 'transformers'
name = local_prefix + local_original_name

version = '4.24.0'
versionsuffix = '-Python-%(pyver)s'

homepage = 'https://github.com/huggingface/transformers'
description = """Transformers provides thousands of pretrained models to perform tasks on texts such as classification, information extraction, question answering, summarization, translation, text generation, etc
in 100+ languages.
Its aim is to make cutting-edge NLP easier to use for everyone.
Transformers provides APIs to quickly download and use those pretrained models on a given text, fine-tune them on your own datasets then share them with the community on our model hub.
At the same time, each python module defining an architecture can be used as a standalone and modified to enable quick research experiments.
Transformers is backed by the two most popular deep learning libraries, PyTorch and TensorFlow, with a seamless integration between them,
allowing you to train your models with one then load it for inference with the other."""

toolchain = {'name': 'gomkl', 'version': '2021a'}

source_urls = ['https://pypi.python.org/packages/source/t/transformers/']
# Use the %(version)s easyconfig template (as the huggingface-hub easyconfigs
# do) instead of hard-coding the version via Python string interpolation.
sources = ['transformers-%(version)s.tar.gz']
checksums = ['486f353a8e594002e48be0e2aba723d96eda839e63bfe274702a4b5eda85559b']

options = {'modulename': 'transformers'}

local_tf_version = "2.6.5"
local_pytorch_version = "1.11.0"
local_cuda_version = "11.3.1"
# Shared versionsuffix for the CUDA-enabled deep-learning backends;
# defined once instead of repeating the interpolation four times.
local_cuda_suffix = '-cuda-%s-Python-%%(pyver)s' % local_cuda_version

builddependencies = [
    ('nlpl-pytorch', local_pytorch_version, local_cuda_suffix),
    ('nlpl-tensorflow', local_tf_version, local_cuda_suffix),
]
dependencies = [
    ('Python', '3.9.5'),
    ('nlpl-python-candy', '2022.01', versionsuffix),
    ('nlpl-tokenizers', '0.12.1', versionsuffix),
    ('nlpl-huggingface-hub', '0.11.0', versionsuffix),
    ('nlpl-pytorch', local_pytorch_version, local_cuda_suffix),
    ('nlpl-tensorflow', local_tf_version, local_cuda_suffix),
]

use_pip = True
sanity_pip_check = True

moduleclass = 'tools'
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment