# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4

PortSystem          1.0
PortGroup           github 1.0
PortGroup           python 1.0

github.setup        tensorflow mesh 441ff477dcff8fc577eb59019007e55a278c87d6
# no tagged release; version from setup.py plus github commit date
version             0.1.18.20210305
revision            0
name                py-${github.project}-${github.author}
platforms           {darwin any}
supported_archs     noarch
license             Apache-2
maintainers         nomaintainer

description         Mesh TensorFlow - Model Parallelism Made Easier

long_description    Mesh TensorFlow (mtf) is a language for distributed \
                    deep learning, capable of specifying a broad class \
                    of distributed tensor computations. The purpose of \
                    Mesh TensorFlow is to formalize and implement \
                    distribution strategies for your computation graph \
                    over your hardware/processors. For example: "Split \
                    the batch over rows of processors and split the \
                    units in the hidden layer across columns of \
                    processors." Mesh TensorFlow is implemented as a \
                    layer over TensorFlow.

distname            ${github.project}-${github.author}-${version}

checksums           rmd160  47fc56ab18d7cf0d397dc7a194a94663cae81851 \
                    sha256  57bbc4caa0958e879ed24a2dac679f257e28355a1f11f0f370708bb9b982c1d4 \
                    size    361646

python.versions     38 39

if {${name} ne ${subport}} {
    depends_build-append \
                    port:py${python.version}-pytest-runner

    depends_run-append \
                    port:py${python.version}-absl \
                    port:py${python.version}-future \
                    port:py${python.version}-gin-config \
                    port:py${python.version}-six

    post-destroot {
        set docdir ${prefix}/share/doc/${subport}
        xinstall -d ${destroot}${docdir}
        xinstall -m 0644 -W ${worksrcpath} LICENSE README.md \
            ${destroot}${docdir}
    }

    livecheck.type  none
}