diff options
author | Sven Gothel <[email protected]> | 2019-05-15 01:52:32 +0200 |
---|---|---|
committer | Sven Gothel <[email protected]> | 2019-05-15 01:52:32 +0200 |
commit | 8f0bc06071ed608a13e81c14ef60204005a83779 (patch) | |
tree | f4b12eced740e8289d3ce6d78d382a58f2a2d096 | |
parent | e5155b34e7e9f99ccb30ee7923ec9c0562cebc55 (diff) |
Dropping deprecated rome-fetcher via apache's CloseableHttpClient (branch: rome-fetcher-drop)
See https://github.com/rometools/rome/issues/276
This still has issues with empty streams (premature EOS),
and the code snippet in the above issue #276 is meant to replace
rome's HttpClientFeedFetcher with apache's CloseableHttpClient.
However, we were originally using rome's HttpURLFeedFetcher.
-rw-r--r-- | pom.xml | 7 | ||||
-rw-r--r-- | src/main/java/com/jogamp/hungryharry/FeedAggregator.java | 67 |
2 files changed, 38 insertions, 36 deletions
@@ -56,7 +56,6 @@ <version>2.12.0</version> <scope>test</scope> </dependency> - <!-- <dependency> <groupId>org.apache.httpcomponents</groupId> <artifactId>httpclient</artifactId> @@ -77,18 +76,12 @@ <artifactId>fluent-hc</artifactId> <version>4.5.8</version> </dependency> - --> <dependency> <groupId>com.rometools</groupId> <artifactId>rome</artifactId> <version>1.12.0</version> </dependency> <dependency> - <groupId>com.rometools</groupId> - <artifactId>rome-fetcher</artifactId> - <version>1.12.0</version> - </dependency> - <dependency> <groupId>org.freemarker</groupId> <artifactId>freemarker</artifactId> <version>2.3.28</version> diff --git a/src/main/java/com/jogamp/hungryharry/FeedAggregator.java b/src/main/java/com/jogamp/hungryharry/FeedAggregator.java index 12f87f8..404265a 100644 --- a/src/main/java/com/jogamp/hungryharry/FeedAggregator.java +++ b/src/main/java/com/jogamp/hungryharry/FeedAggregator.java @@ -6,6 +6,12 @@ package com.jogamp.hungryharry; import com.jogamp.hungryharry.Config.Feed; import com.jogamp.hungryharry.Config.Planet; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; + import com.rometools.rome.feed.synd.SyndContent; import com.rometools.rome.feed.synd.SyndEntry; import com.rometools.rome.feed.synd.SyndFeed; @@ -14,10 +20,6 @@ import com.rometools.rome.io.FeedException; import com.rometools.rome.io.SyndFeedInput; import com.rometools.rome.io.SyndFeedOutput; import com.rometools.rome.io.XmlReader; -import com.rometools.fetcher.FeedFetcher; -import com.rometools.fetcher.impl.FeedFetcherCache; -import com.rometools.fetcher.impl.HashMapFeedInfoCache; -import com.rometools.fetcher.impl.HttpURLFeedFetcher; import freemarker.template.Configuration; import freemarker.template.ObjectWrapper; @@ -231,37 +233,44 @@ public class 
FeedAggregator { } private List<SyndEntry> downloadFeeds(List<Feed> feeds, List<SyndFeed> downloadedFeeds) throws IllegalArgumentException { - - FeedFetcherCache feedInfoCache = HashMapFeedInfoCache.getInstance(); - FeedFetcher feedFetcher = new HttpURLFeedFetcher(feedInfoCache); - // trust foreign doctype? feedFetcher.setAllowDoctypes(true); List<SyndEntry> collectedEntries = new ArrayList<SyndEntry>(); - Set<String> ids = new HashSet<String>(); - for (Config.Feed feed : feeds) { - LOG.info("downloading "+feed); - try { - SyndFeed inFeed = feedFetcher.retrieveFeed(new URL(feed.url)); - downloadedFeeds.add(inFeed); - List<SyndEntry> entries = inFeed.getEntries(); - - LOG.info("downloaded "+entries.size()+ " entries"); - - //skip duplicates - for (SyndEntry entry : entries) { - String uid = entry.getLink(); - if(!ids.contains(uid)) { - ids.add(uid); - collectedEntries.add(entry); - }else{ - LOG.info("skiping duplicate entry: "+uid); + try (CloseableHttpClient client = HttpClients.createMinimal()) { + for (Config.Feed feed : feeds) { + LOG.info("downloading "+feed); + HttpUriRequest request = new HttpGet(feed.url); + try (CloseableHttpResponse response = client.execute(request); + InputStream stream = response.getEntity().getContent() + ) + { + if( true || 0 < stream.available() ) { + SyndFeedInput input = new SyndFeedInput(); + SyndFeed inFeed = input.build(new XmlReader(stream, true /* lenient */)); + downloadedFeeds.add(inFeed); + List<SyndEntry> entries = inFeed.getEntries(); + + LOG.info("downloaded "+entries.size()+ " entries"); + + //skip duplicates + for (SyndEntry entry : entries) { + String uid = entry.getLink(); + if(!ids.contains(uid)) { + ids.add(uid); + collectedEntries.add(entry); + } else { + LOG.info("skiping duplicate entry: "+uid); + } + } + } else { + LOG.log(WARNING, "skipping feed due to zero input"); } + } catch (Exception ex) { + LOG.log(WARNING, "skipping feed", ex); } - - } catch (Exception ex) { - LOG.log(WARNING, "skipping feed", ex); } + 
} catch (Exception ex) { + LOG.log(SEVERE, "HttpClient failure", ex); } sort(collectedEntries, new Comparator<SyndEntry>() { |