diff --git a/dev-python/robot-detection/Manifest b/dev-python/robot-detection/Manifest
new file mode 100644
index 0000000..9c579d2
--- /dev/null
+++ b/dev-python/robot-detection/Manifest
@@ -0,0 +1 @@
+DIST robot-detection-0.4.0.tar.gz 81090 BLAKE2B d8422567119a633e1f97586895191a5f331e7800db596b204cc6e7a0b44efee1ffbc0e4941911a1410be329126997f59eebb63441e12196feaf5ac8000b1434b SHA512 10b1fbd2307c375e3cd067e43bee1bc0ff71f4b83ede9b92d2fb8d60edfd6d4a8d1a4d06d74e3c0653056f2e2b2cb81a7e997047093e925bd79bf4626d33d85f
diff --git a/dev-python/robot-detection/metadata.xml b/dev-python/robot-detection/metadata.xml
new file mode 100644
index 0000000..ff64bfe
--- /dev/null
+++ b/dev-python/robot-detection/metadata.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+	<!-- maintainer-needed -->
+	<upstream>
+		<remote-id type="pypi">robot-detection</remote-id>
+		<remote-id type="github">rory/robot-detection</remote-id>
+	</upstream>
+</pkgmetadata>
diff --git a/dev-python/robot-detection/robot-detection-0.4.0-r1.ebuild b/dev-python/robot-detection/robot-detection-0.4.0-r1.ebuild
new file mode 100644
index 0000000..8e00270
--- /dev/null
+++ b/dev-python/robot-detection/robot-detection-0.4.0-r1.ebuild
@@ -0,0 +1,19 @@
+# Copyright 1999-2021 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{9..11} )
+inherit distutils-r1
+
+DESCRIPTION="Module to detect if a given HTTP User Agent is a web crawler"
+HOMEPAGE="https://github.com/rory/robot-detection"
+SRC_URI="https://github.com/rory/robot-detection/archive/v${PV}.tar.gz -> ${P}.tar.gz"
+
+LICENSE="GPL-3"
+SLOT="0"
+KEYWORDS="~amd64"
+
+RDEPEND="dev-python/six[${PYTHON_USEDEP}]"
+
+distutils_enable_tests unittest
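
For reference, the DESCRIPTION above summarises what the packaged module does: it classifies HTTP User-Agent strings as web crawlers or regular clients. A minimal usage sketch, assuming the upstream package exposes robot_detection.is_robot() as its README describes; the User-Agent strings below are illustrative only:

    # Check a few User-Agent strings against robot-detection's crawler list;
    # is_robot() is expected to return True for known crawler signatures.
    import robot_detection

    user_agents = [
        "Googlebot/2.1 (+http://www.google.com/bot.html)",
        "Mozilla/5.0 (X11; Linux x86_64; rv:102.0) Gecko/20100101 Firefox/102.0",
    ]

    for ua in user_agents:
        kind = "robot" if robot_detection.is_robot(ua) else "not a robot"
        print(f"{ua!r}: {kind}")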