Commit

Javadoc fixes, now build without javadoc warnings!
tobli committed May 14, 2015
1 parent 6149785 commit f5f83f0
Showing 8 changed files with 15 additions and 14 deletions.
@@ -41,6 +41,7 @@ public class CrawlerResult {
*
* @param theStartPoint where the crawl was started
* @param theUrls the urls that was fetched
* @param theVerifiedResponses the verified responses
* @param theNonWorkingResponses the non working urls
*/
public CrawlerResult(String theStartPoint, Set<CrawlerURL> theUrls,
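
For context, a minimal construction sketch for the constructor documented above. The hunk truncates the signature, so the types of the verified and non-working parameters (Set<HTMLPageResponse>) are assumptions taken from the @param descriptions; the crawler classes are assumed to be on the classpath.

    import java.util.Collections;
    import java.util.Set;

    class CrawlerResultExample {                         // illustrative only
      static CrawlerResult build(Set<CrawlerURL> urls) {
        // Trailing parameter types are assumed from the @param tags,
        // since the diff cuts the signature off after theUrls.
        Set<HTMLPageResponse> verified = Collections.emptySet();
        Set<HTMLPageResponse> nonWorking = Collections.emptySet();
        return new CrawlerResult("http://example.com/", urls, verified, nonWorking);
      }
    }
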
@@ -51,6 +51,7 @@ public class HTMLPageResponse {
* @param theBody the body
* @param theEncoding the encoding
* @param theSize the size
* @param theResponseType the response mime type
* @param theFetchTime the time it took to fetch the response
*/
public HTMLPageResponse(CrawlerURL pageUrl, int theResponseCode, Map<String, String> theHeaders,
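
A hedged construction sketch for HTMLPageResponse. Only the first three parameters are visible in this hunk; the remaining order and types (body, encoding, size, response mime type, fetch time) are assumptions based on the @param tags.

    import java.util.Collections;
    import java.util.Map;

    class HTMLPageResponseExample {                      // illustrative only
      static HTMLPageResponse build(CrawlerURL pageUrl) {
        Map<String, String> headers = Collections.emptyMap();
        // Trailing arguments follow the @param order; their exact types are
        // assumed, since the full signature is not shown in the diff.
        return new HTMLPageResponse(pageUrl, 200, headers,
            "<html></html>", "UTF-8", 13, "text/html", 42);
      }
    }
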
@@ -45,6 +45,8 @@ public class HTMLPageResponseCallable implements Callable<HTMLPageResponse> {
* @param theUrl the url to call.
* @param theFetcher the fetcher to use
* @param fetchTheBody if true, the response body is fetched, else not.
* @param theRequestHeaders request headers to add
* @param followRedirectsToNewDomain if true, follow redirects that lead to a different domain.
*/
public HTMLPageResponseCallable(CrawlerURL theUrl, HTMLPageResponseFetcher theFetcher,
boolean fetchTheBody, Map<String, String> theRequestHeaders, boolean followRedirectsToNewDomain) {
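
Since the class is a Callable<HTMLPageResponse>, a typical use is submitting it to an executor. The constructor call below matches the signature shown in the diff; the URL, fetcher and executor are assumed to exist.

    import java.util.Collections;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Future;

    class CallableExample {                              // illustrative only
      static Future<HTMLPageResponse> fetchAsync(CrawlerURL url,
          HTMLPageResponseFetcher fetcher, ExecutorService pool) {
        // Fetch the body, add no extra request headers, stay on the original domain.
        return pool.submit(new HTMLPageResponseCallable(url, fetcher, true,
            Collections.<String, String>emptyMap(), false));
      }
    }
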
@@ -34,9 +34,13 @@ public interface HTMLPageResponseFetcher {
*
* @param url the url to fetch
* @param fetchBody fetch the body or not
* @param requestHeaders request headers to add
* @param followRedirectsToNewDomain if true, follow redirects that lead to a different domain.
* @return the response
*/
HTMLPageResponse get(CrawlerURL url, boolean fetchBody, Map<String, String> requestHeaders, boolean followRedirectsToNewDomain);
HTMLPageResponse get(CrawlerURL url, boolean fetchBody,
Map<String, String> requestHeaders,
boolean followRedirectsToNewDomain);


/**
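
A call sketch for the interface method documented above. The get(...) signature is taken from the diff; the CrawlerURL(String) constructor used here is an assumption.

    import java.util.HashMap;
    import java.util.Map;

    class FetcherCallExample {                           // illustrative only
      static HTMLPageResponse fetch(HTMLPageResponseFetcher fetcher) {
        Map<String, String> requestHeaders = new HashMap<String, String>();
        requestHeaders.put("Accept-Encoding", "gzip");
        // Fetch the body, send one extra header, do not follow redirects off-domain.
        return fetcher.get(new CrawlerURL("http://example.com/"), true, requestHeaders, false);
      }
    }
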
@@ -31,8 +31,9 @@ public interface AssetsVerifier {
/**
* Verify that all the assets work (=return 200) for the working urls in the result.
*
* @param responses
* @return
* @param responses responses to verify
* @param configuration configuration to verify against
* @return result of the verification
*/
AssetsVerificationResult verify(Set<HTMLPageResponse> responses,
CrawlerConfiguration configuration);
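
Usage of verify(...) as documented above. Obtaining the verifier, the working responses and the configuration is outside this hunk, so they are simply passed in here.

    import java.util.Set;

    class VerifyExample {                                // illustrative only
      static AssetsVerificationResult verifyAll(AssetsVerifier verifier,
          Set<HTMLPageResponse> workingResponses, CrawlerConfiguration configuration) {
        // Checks that every asset referenced by the working pages returns 200.
        return verifier.verify(workingResponses, configuration);
      }
    }
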
@@ -161,6 +161,7 @@ public CrawlerResult getUrls(CrawlerConfiguration configuration) {
* @param responses holding bodys where we should fetch the links.
* @param allUrls every url we have fetched so far
* @param nonWorkingUrls the urls that didn't work to fetch
* @param verifiedUrls responses that are already verified
* @param host the host we are working on
* @param onlyOnPath only fetch files that match the following path. If empty, all will match.
* @param notOnPath don't collect/follow urls that contains this text in the url
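
This hunk sits inside the crawler's getUrls(CrawlerConfiguration) entry point, shown in the hunk header. A minimal sketch of driving a crawl through it; how the Crawler and CrawlerConfiguration instances are created is not shown here and is assumed.

    class CrawlExample {                                 // illustrative only
      static CrawlerResult crawl(Crawler crawler, CrawlerConfiguration configuration) {
        // Runs the crawl described by the configuration and returns the fetched,
        // non-working and verified responses in a CrawlerResult.
        return crawler.getUrls(configuration);
      }
    }
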
@@ -68,20 +68,10 @@ public HTTPClientResponseFetcher(HttpClient client) {
httpClient = client;
}

/**
* Shutdown the client.
*/
public void shutdown() {
httpClient.getConnectionManager().shutdown();
}

/**
* Get a response.
*
* @param url the url
* @param getPage the body of the page or not
* @return the response
*/
public HTMLPageResponse get(CrawlerURL url, boolean getPage, Map<String, String> requestHeaders, boolean followRedirectsToNewDomain) {

if (url.isWrongSyntax()) {
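
Putting the pieces of this hunk together: construct the fetcher from an HttpClient, fetch a page, then shut the client down. The constructor, get(...) and shutdown() come from the code above; the DefaultHttpClient setup and the CrawlerURL(String) constructor are assumptions.

    import java.util.Collections;

    import org.apache.http.client.HttpClient;
    import org.apache.http.impl.client.DefaultHttpClient;

    class HttpFetchExample {                             // illustrative only
      static HTMLPageResponse fetchOnce(String url) {
        HttpClient client = new DefaultHttpClient();     // assumed client setup
        HTTPClientResponseFetcher fetcher = new HTTPClientResponseFetcher(client);
        try {
          // Fetch the body, no extra headers, stay on the original domain.
          return fetcher.get(new CrawlerURL(url), true,
              Collections.<String, String>emptyMap(), false);
        } finally {
          fetcher.shutdown();  // shuts down the underlying connection manager
        }
      }
    }
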
src/main/java/com/soulgalore/crawler/util/HTTPSFaker.java (3 changes: 2 additions & 1 deletion)
@@ -54,7 +54,8 @@ private HTTPSFaker() {}

/**
* Get a HttpClient that accept any HTTP certificate.
*
*
* @param cm the connection manager to use when creating the new HttpClient
* @return a httpClient that accept any HTTP certificate
*/
@SuppressWarnings("deprecation")
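
A hedged sketch of the factory documented above. The hunk only shows the javadoc, so the method name getClientThatAllowAnyHTTPS and the ThreadSafeClientConnManager parameter type are assumptions inferred from the @param and @return descriptions.

    import org.apache.http.client.HttpClient;
    import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;

    class HttpsFakerExample {                            // illustrative only
      static HttpClient anyCertClient() {
        ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager();  // assumed manager type
        // Method name is an assumption; returns a client that accepts any certificate.
        return HTTPSFaker.getClientThatAllowAnyHTTPS(cm);
      }
    }
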
