author     Frederic Culot <culot@FreeBSD.org>    2011-04-14 21:18:39 +0800
committer  Frederic Culot <culot@FreeBSD.org>    2011-04-14 21:18:39 +0800
commit     3395f956808458a4a02bb6e6a72b6663c5c74773 (patch)
tree       20c10dd8a1fb49d9f0f14d8ff68b50a74deab4f5
parent     4a17519108015444fc7064f519c2478793e47421 (diff)
WWW::RobotRules parses /robots.txt files which are used to forbid conforming
robots from accessing parts of a web site. The parsed files are kept in a
WWW::RobotRules object, and this object provides methods to check if access
to a given URL is prohibited.
WWW: http://search.cpan.org/dist/WWW-RobotRules/
This new port is needed to update www/p5-libwww.
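For context, a minimal usage sketch based on the module's documented interface (new, parse, allowed). The bot name and URLs below are hypothetical, and fetching via LWP::Simple assumes www/p5-libwww is installed:

    # Minimal sketch of the documented WWW::RobotRules API;
    # bot name and URLs are hypothetical.
    use strict;
    use warnings;
    use WWW::RobotRules;
    use LWP::Simple qw(get);

    # Rules are remembered per host, keyed to this user agent name.
    my $rules = WWW::RobotRules->new('MyBot/1.0');

    # Fetch and parse a site's /robots.txt (URL is illustrative).
    my $url = 'http://example.com/robots.txt';
    my $robots_txt = get($url);
    $rules->parse($url, $robots_txt) if defined $robots_txt;

    # Check whether a given URL may be visited before fetching it.
    if ($rules->allowed('http://example.com/private/page.html')) {
        print "fetch allowed\n";
    } else {
        print "disallowed by robots.txt\n";
    }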
-rw-r--r--   www/Makefile                     |  1
-rw-r--r--   www/p5-WWW-RobotRules/Makefile   | 26
-rw-r--r--   www/p5-WWW-RobotRules/distinfo   |  2
-rw-r--r--   www/p5-WWW-RobotRules/pkg-descr  |  6
-rw-r--r--   www/p5-WWW-RobotRules/pkg-plist  |  7
5 files changed, 42 insertions, 0 deletions
diff --git a/www/Makefile b/www/Makefile
index 364ac3bf1cda..338ba128bfdc 100644
--- a/www/Makefile
+++ b/www/Makefile
@@ -1286,6 +1286,7 @@
     SUBDIR += p5-WWW-Pastebin-PastebinCom-Create
     SUBDIR += p5-WWW-Plurk
     SUBDIR += p5-WWW-Robot
+    SUBDIR += p5-WWW-RobotRules
     SUBDIR += p5-WWW-RobotRules-Parser
     SUBDIR += p5-WWW-Scraper-ISBN
     SUBDIR += p5-WWW-Scraper-ISBN-Amazon_Driver
diff --git a/www/p5-WWW-RobotRules/Makefile b/www/p5-WWW-RobotRules/Makefile
new file mode 100644
index 000000000000..e890fec8fded
--- /dev/null
+++ b/www/p5-WWW-RobotRules/Makefile
@@ -0,0 +1,26 @@
+# New ports collection makefile for: p5-WWW-RobotRules
+# Date created: 2011-04-14
+# Whom: Frederic Culot <culot@FreeBSD.org>
+#
+# $FreeBSD$
+#
+
+PORTNAME=	WWW-RobotRules
+PORTVERSION=	6.01
+CATEGORIES=	www perl5
+MASTER_SITES=	CPAN
+PKGNAMEPREFIX=	p5-
+
+MAINTAINER=	perl@FreeBSD.org
+COMMENT=	Database of robots.txt-derived permissions
+
+RUN_DEPENDS=	p5-URI>=1.10:${PORTSDIR}/net/p5-URI
+
+BUILD_DEPENDS:=	${RUN_DEPENDS}
+
+PERL_CONFIGURE=	yes
+
+MAN3=	WWW::RobotRules.3 \
+	WWW::RobotRules::AnyDBM_File.3
+
+.include <bsd.port.mk>
diff --git a/www/p5-WWW-RobotRules/distinfo b/www/p5-WWW-RobotRules/distinfo
new file mode 100644
index 000000000000..4f623e0166aa
--- /dev/null
+++ b/www/p5-WWW-RobotRules/distinfo
@@ -0,0 +1,2 @@
+SHA256 (WWW-RobotRules-6.01.tar.gz) = f817e3e982c9d869c7796bcb5737c3422c2272355424acd162d0f3b132bec9d3
+SIZE (WWW-RobotRules-6.01.tar.gz) = 9047
diff --git a/www/p5-WWW-RobotRules/pkg-descr b/www/p5-WWW-RobotRules/pkg-descr
new file mode 100644
index 000000000000..fe8711ec7e7a
--- /dev/null
+++ b/www/p5-WWW-RobotRules/pkg-descr
@@ -0,0 +1,6 @@
+This module parses /robots.txt files which are used to forbid conforming
+robots from accessing parts of a web site. The parsed files are kept in
+a WWW::RobotRules object, and this object provides methods to check if
+access to a given URL is prohibited.
+
+WWW: http://search.cpan.org/dist/WWW-RobotRules/
diff --git a/www/p5-WWW-RobotRules/pkg-plist b/www/p5-WWW-RobotRules/pkg-plist
new file mode 100644
index 000000000000..88fda90849c0
--- /dev/null
+++ b/www/p5-WWW-RobotRules/pkg-plist
@@ -0,0 +1,7 @@
+%%SITE_PERL%%/WWW/RobotRules.pm
+%%SITE_PERL%%/WWW/RobotRules/AnyDBM_File.pm
+%%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW/RobotRules/.packlist
+@dirrmtry %%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW/RobotRules
+@dirrmtry %%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW
+@dirrmtry %%SITE_PERL%%/WWW/RobotRules
+@dirrmtry %%SITE_PERL%%/WWW
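As the MAN3 and pkg-plist entries above show, the port also installs WWW::RobotRules::AnyDBM_File, a subclass that stores parsed rules in a DBM file so a robot can reuse them across runs. A minimal sketch, assuming the documented two-argument constructor; the agent name, cache file name, and inline rules are hypothetical:

    # Sketch of the persistent variant, assuming the documented
    # WWW::RobotRules::AnyDBM_File API; agent name and file are hypothetical.
    use strict;
    use warnings;
    use WWW::RobotRules::AnyDBM_File;

    # Rules parsed now are saved in 'robot-cache' and reused on later runs.
    my $rules = WWW::RobotRules::AnyDBM_File->new('MyBot/1.0', 'robot-cache');

    # parse() also accepts robots.txt content directly as a string.
    $rules->parse('http://example.com/robots.txt',
                  "User-agent: *\nDisallow: /private/\n");

    print $rules->allowed('http://example.com/private/x.html')
        ? "allowed\n" : "disallowed\n";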