mirror of
https://git.freebsd.org/ports.git
synced 2025-04-28 09:36:41 -04:00
misc/py-schedulefree: New port: Schedule free learning in PyTorch
This commit is contained in:
parent
8866cf92ae
commit
d9e91d6828
4 changed files with 40 additions and 0 deletions
|
@ -513,6 +513,7 @@
|
|||
SUBDIR += py-pytorch-lightning
|
||||
SUBDIR += py-pyviz-comms
|
||||
SUBDIR += py-safetensors
|
||||
SUBDIR += py-schedulefree
|
||||
SUBDIR += py-scikit-fusion
|
||||
SUBDIR += py-scikit-quant
|
||||
SUBDIR += py-serverfiles
|
||||
|
|
25
misc/py-schedulefree/Makefile
Normal file
25
misc/py-schedulefree/Makefile
Normal file
|
@ -0,0 +1,25 @@
|
|||
PORTNAME=	schedulefree
DISTVERSION=	1.4.1
CATEGORIES=	misc python # machine-learning
MASTER_SITES=	PYPI
PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}

MAINTAINER=	yuri@FreeBSD.org
COMMENT=	Schedule free learning in PyTorch
WWW=		https://github.com/facebookresearch/schedule_free

LICENSE=	APACHE20
LICENSE_FILE=	${WRKSRC}/LICENSE

# hatchling is the PEP 517 build backend; pytorch/typing-extensions are runtime deps
BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}hatchling>0:devel/py-hatchling@${PY_FLAVOR}
RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}pytorch>0:misc/py-pytorch@${PY_FLAVOR} \
		${PYTHON_PKGNAMEPREFIX}typing-extensions>0:devel/py-typing-extensions@${PY_FLAVOR}

USES=		python
USE_PYTHON=	pep517 autoplist pytest

# pure-Python package, installs no machine-dependent files
NO_ARCH=	yes

# most tests fail: AssertionError: Torch not compiled with CUDA enabled, see https://github.com/facebookresearch/schedule_free/issues/65

.include <bsd.port.mk>
|
3
misc/py-schedulefree/distinfo
Normal file
3
misc/py-schedulefree/distinfo
Normal file
|
@ -0,0 +1,3 @@
|
|||
TIMESTAMP = 1742853003
SHA256 (schedulefree-1.4.1.tar.gz) = 69ef25601d1fc0d8dd00cb36f9af78833f88b7846f1bb6ddecc9f144f3e9f7cb
SIZE (schedulefree-1.4.1.tar.gz) = 29281
|
11
misc/py-schedulefree/pkg-descr
Normal file
11
misc/py-schedulefree/pkg-descr
Normal file
|
@ -0,0 +1,11 @@
|
|||
Schedulefree is a Schedule-Free optimizer in PyTorch.

We provide several Schedule-Free optimizer implementations:

* SGDScheduleFree and SGDScheduleFreeReference: Schedule-free variants of SGD
* AdamWScheduleFree and AdamWScheduleFreeReference: Schedule-free variants
  of AdamW
* RAdamScheduleFree: Schedule-free variant of RAdam, which eliminates the need
  for both learning rate scheduling and warmup (community-contributed
  implementation)
* Experimental ScheduleFreeWrapper to combine with other optimizers
|
Loading…
Add table
Reference in a new issue