WWW::RobotRules parses /robots.txt files which are used to forbid
conforming robots from accessing parts of a web site. The parsed files
are kept in a WWW::RobotRules object, and this object provides methods
to check if access to a given URL is prohibited.

WWW: http://search.cpan.org/dist/WWW-RobotRules/

This new port is needed to update www/p5-libwww.
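To illustrate the interface described above, here is a minimal sketch of the module in use; the agent name, host, and rules text are invented for the example and are not part of this commit:

    use strict;
    use warnings;
    use WWW::RobotRules;

    # The agent name selects which User-agent sections of robots.txt apply.
    my $rules = WWW::RobotRules->new('MyBot/1.0');

    # Normally this text is fetched from the site; it is inlined here so
    # the sketch runs without network access (hypothetical rules).
    my $robots_txt = <<'EOT';
    User-agent: *
    Disallow: /private/
    EOT

    $rules->parse('http://example.com/robots.txt', $robots_txt);

    # allowed() answers the question the commit message describes.
    print $rules->allowed('http://example.com/index.html')
        ? "index.html: allowed\n" : "index.html: forbidden\n";
    print $rules->allowed('http://example.com/private/a.html')
        ? "private: allowed\n" : "private: forbidden\n";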
parent 4a17519108
commit 3395f95680

Notes (svn2git, 2021-03-31 03:12:20 +00:00):
    svn path=/head/; revision=272722
www/Makefile
@@ -1286,6 +1286,7 @@
     SUBDIR += p5-WWW-Pastebin-PastebinCom-Create
     SUBDIR += p5-WWW-Plurk
     SUBDIR += p5-WWW-Robot
+    SUBDIR += p5-WWW-RobotRules
     SUBDIR += p5-WWW-RobotRules-Parser
     SUBDIR += p5-WWW-Scraper-ISBN
     SUBDIR += p5-WWW-Scraper-ISBN-Amazon_Driver
www/p5-WWW-RobotRules/Makefile (new file)
@@ -0,0 +1,26 @@
+# New ports collection makefile for: p5-WWW-RobotRules
+# Date created: 2011-04-14
+# Whom: Frederic Culot <culot@FreeBSD.org>
+#
+# $FreeBSD$
+#
+
+PORTNAME=	WWW-RobotRules
+PORTVERSION=	6.01
+CATEGORIES=	www perl5
+MASTER_SITES=	CPAN
+PKGNAMEPREFIX=	p5-
+
+MAINTAINER=	perl@FreeBSD.org
+COMMENT=	Database of robots.txt-derived permissions
+
+RUN_DEPENDS=	p5-URI>=1.10:${PORTSDIR}/net/p5-URI
+
+BUILD_DEPENDS:=	${RUN_DEPENDS}
+
+PERL_CONFIGURE=	yes
+
+MAN3=		WWW::RobotRules.3 \
+		WWW::RobotRules::AnyDBM_File.3
+
+.include <bsd.port.mk>
www/p5-WWW-RobotRules/distinfo (new file)
@@ -0,0 +1,2 @@
+SHA256 (WWW-RobotRules-6.01.tar.gz) = f817e3e982c9d869c7796bcb5737c3422c2272355424acd162d0f3b132bec9d3
+SIZE (WWW-RobotRules-6.01.tar.gz) = 9047
www/p5-WWW-RobotRules/pkg-descr (new file)
@@ -0,0 +1,6 @@
+This module parses /robots.txt files which are used to forbid conforming
+robots from accessing parts of a web site. The parsed files are kept in
+a WWW::RobotRules object, and this object provides methods to check if
+access to a given URL is prohibited.
+
+WWW: http://search.cpan.org/dist/WWW-RobotRules/
www/p5-WWW-RobotRules/pkg-plist (new file)
@@ -0,0 +1,7 @@
+%%SITE_PERL%%/WWW/RobotRules.pm
+%%SITE_PERL%%/WWW/RobotRules/AnyDBM_File.pm
+%%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW/RobotRules/.packlist
+@dirrmtry %%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW/RobotRules
+@dirrmtry %%SITE_PERL%%/%%PERL_ARCH%%/auto/WWW
+@dirrmtry %%SITE_PERL%%/WWW/RobotRules
+@dirrmtry %%SITE_PERL%%/WWW
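The plist above also installs WWW::RobotRules::AnyDBM_File, a subclass that keeps the parsed rules in a DBM file rather than in memory, so a long-running robot can reuse them across runs. A minimal sketch, with an invented agent name and cache filename:

    use strict;
    use warnings;
    use WWW::RobotRules::AnyDBM_File;

    # Same parse()/allowed() interface as WWW::RobotRules, but backed by
    # a DBM file ('robot-cache' is an arbitrary name for this sketch).
    my $rules = WWW::RobotRules::AnyDBM_File->new('MyBot/1.0', 'robot-cache');

    $rules->parse('http://example.com/robots.txt',
                  "User-agent: *\nDisallow: /tmp/\n");

    print $rules->allowed('http://example.com/tmp/x.html')
        ? "allowed\n" : "forbidden\n";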