Source Code Cross Referenced for RobotsTXTRule.java in Web Crawler » jspider » net.javacoding.jspider.core.rule.impl



package net.javacoding.jspider.core.rule.impl;

import net.javacoding.jspider.api.model.Decision;
import net.javacoding.jspider.api.model.Site;
import net.javacoding.jspider.core.SpiderContext;
import net.javacoding.jspider.core.model.DecisionInternal;
import net.javacoding.jspider.core.model.SiteInternal;
import net.javacoding.jspider.core.util.URLUtil;
import net.javacoding.jspider.core.util.html.RobotsTXTLine;
import net.javacoding.jspider.core.util.html.RobotsTXTLineSet;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
/**
 * Rule implementation that applies the rules expressed by a site's robots.txt
 * file to the resources we want to fetch on that site.
 * This file allows webmasters to exclude certain resources and folders from
 * being spidered by web robots, to prevent their inclusion in search engines,
 * etc.
 *
 * $Id: RobotsTXTRule.java,v 1.13 2003/03/28 17:26:28 vanrogu Exp $
 *
 * @author Günther Van Roey
 */
public class RobotsTXTRule extends BaseRuleImpl {

    /** user agent under which we're operating. */
    protected String effectiveUserAgent;

    /** user agent in the robots.txt file we're obeying. */
    protected String obeyedUserAgent;

    /** all lines in the robots.txt file that apply to us and forbid access to a part of the site. */
    protected RobotsTXTLine[] forbiddenPaths;

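    /*
     * Illustrative note (not part of the original source): given a
     * hypothetical robots.txt block such as
     *
     *   User-agent: *
     *   Disallow: /private/
     *   Disallow: /cgi-bin/
     *
     * forbiddenPaths would hold the two Disallow lines and obeyedUserAgent
     * would be "*" (a selector the Javadoc below confirms is possible).
     */
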
    /**
     * Public constructor.
     * @param userAgent the user agent under which we're operating
     * @param is the input stream to read the robots.txt file from
     * @throws IOException in case something goes wrong reading the robots.txt
     */
    public RobotsTXTRule(String userAgent, InputStream is)
            throws IOException {
        RobotsTXTLineSet lineSet = RobotsTXTLineSet.findLineSet(is, userAgent);
        this.effectiveUserAgent = userAgent;
        if (lineSet == null) {
            this.obeyedUserAgent = null;
            forbiddenPaths = new RobotsTXTLine[0];
        } else {
            this.obeyedUserAgent = lineSet.getUserAgent();
            forbiddenPaths = lineSet.getLines();
        }
    }
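
    /*
     * Illustrative note (not part of the original source): if no
     * "User-agent:" block in the file matches us, findLineSet returns null
     * and the rule is left with an empty forbidden list, so it allows
     * every resource on the site.
     */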

    /**
     * Returns the user agent from robots.txt we're obeying (can be '*').
     * This user agent identification is the first match we encountered in the
     * file; a match occurs when our effective user agent contains the user
     * agent identification as a substring, compared case-insensitively.
     * @return the user agent selector we're obeying.
     */
    public String getObeyedUserAgent() {
        return obeyedUserAgent;
    }
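
    /*
     * Illustrative note (assumption, not part of the original source):
     * with an effective user agent of "JSpider/0.5.0", a block headed
     * "User-agent: jspider" is obeyed, since "jspider" occurs in
     * "JSpider/0.5.0" when compared case-insensitively.
     */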

    /**
     * Applies the rule to a given URL.
     * @param context the spider context we're working in
     * @param currentSite the site we're spidering
     * @param url the URL of the resource to be tested for spider permission
     * @return Decision object expressing this rule's decision on the resource
     */
    public Decision apply(SpiderContext context, Site currentSite, URL url) {
        String path = url.getPath();
        Decision decision = new DecisionInternal();

        if (context.getStorage().getSiteDAO().find(URLUtil.getSiteURL(url))
                .getObeyRobotsTXT()) {

            for (int i = 0; i < forbiddenPaths.length; i++) {
                RobotsTXTLine forbiddenPath = forbiddenPaths[i];
                if (forbiddenPath.matches(url)) {
                    decision = new DecisionInternal(Decision.RULE_FORBIDDEN,
                            "access to '" + path + "' forbidden");
                    break;
                }
            }
        }
        return decision;
    }

}
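
For reference, a minimal usage sketch (not part of the original source). It assumes the jspider classes above are on the classpath and skips apply(), which needs a fully wired SpiderContext and storage backend; the robots.txt content and the "JSpider" agent string are made up for illustration:

import net.javacoding.jspider.core.rule.impl.RobotsTXTRule;

import java.io.ByteArrayInputStream;

public class RobotsTXTRuleDemo {
    public static void main(String[] args) throws Exception {
        // A hypothetical robots.txt, fed to the rule as an input stream.
        String robotsTxt =
                "User-agent: jspider\n" +
                "Disallow: /private/\n" +
                "Disallow: /cgi-bin/\n";

        RobotsTXTRule rule = new RobotsTXTRule("JSpider",
                new ByteArrayInputStream(robotsTxt.getBytes()));

        // "JSpider" contains "jspider" case-insensitively, so that block
        // is the one obeyed.
        System.out.println(rule.getObeyedUserAgent());   // prints: jspider
    }
}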