Skip to content

Commit 292f322

Browse files
committed
Added: ExecutorService and CompletableFuture examples
1 parent a257d91 commit 292f322

File tree

3 files changed

+142
-0
lines changed

3 files changed

+142
-0
lines changed
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
package com.stacktips.threads;
2+
3+
import java.io.IOException;
import java.net.URI;
import java.net.URL;
5+
6+
public class WebScraper {

    /**
     * Downloads the content behind {@code urlString} and reports its size.
     *
     * <p>Never throws: any failure (malformed URL, connection error, read
     * error) is converted into an {@code "Error scraping ..."} string so
     * concurrent callers can collect results uniformly.
     *
     * @param urlString absolute URL to fetch (e.g. {@code "https://example.com"})
     * @return a summary line with the URL and content length in bytes, or an
     *         error message if the URL is invalid or the fetch fails
     */
    String scrapeWebsite(String urlString) {
        try {
            // URI.create(...).toURL() replaces the URL(String) constructor,
            // which is deprecated since Java 20; it also rejects
            // syntactically invalid and non-absolute URLs up front.
            URL url = URI.create(urlString).toURL();
            try (var stream = url.openStream()) {
                // Reads the whole body into memory just to count bytes —
                // acceptable for a demo, not for very large responses.
                int contentLength = stream.readAllBytes().length;
                return String.format("URL: %s, Content Length: %d bytes", urlString, contentLength);
            }
        } catch (IllegalArgumentException | IOException e) {
            // IllegalArgumentException: invalid/non-absolute URL from URI.create/toURL;
            // IOException: connection or read failure. Preserve the "never throws" contract.
            return "Error scraping " + urlString + ": " + e.getMessage();
        }
    }
}
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
package com.stacktips.threads;
2+
3+
import java.util.List;
4+
import java.util.concurrent.*;
5+
6+
public class WebScraperUsingCompletableFuture {
7+
8+
private final ExecutorService executorService;
9+
private final WebScraper webScraper;
10+
11+
public WebScraperUsingCompletableFuture(int nThreads) {
12+
this.executorService = Executors.newFixedThreadPool(nThreads);
13+
this.webScraper = new WebScraper();
14+
}
15+
16+
public List<String> scrapeWebsites(List<String> urls) {
17+
List<CompletableFuture<String>> futures = urls.stream()
18+
.map(url -> CompletableFuture.supplyAsync(() -> webScraper.scrapeWebsite(url), executorService)
19+
.orTimeout(30, TimeUnit.SECONDS)
20+
.exceptionally(e -> "Error: " + e.getMessage()))
21+
.toList();
22+
23+
return futures.stream()
24+
.map(CompletableFuture::join)
25+
.toList();
26+
}
27+
28+
29+
30+
public void shutdown() {
31+
executorService.shutdown();
32+
try {
33+
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
34+
executorService.shutdownNow();
35+
}
36+
} catch (InterruptedException e) {
37+
executorService.shutdownNow();
38+
}
39+
}
40+
41+
public static void main(String[] args) {
42+
List<String> urls = List.of(
43+
"https://www.example.com",
44+
"https://www.github.com",
45+
"https://www.stackoverflow.com",
46+
"https://www.java.com",
47+
"https://www.spring.io"
48+
);
49+
50+
WebScraperUsingCompletableFuture scraper = new WebScraperUsingCompletableFuture(3);
51+
try {
52+
List<String> results = scraper.scrapeWebsites(urls);
53+
results.forEach(System.out::println);
54+
} finally {
55+
scraper.shutdown();
56+
}
57+
}
58+
}
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
package com.stacktips.threads;
2+
3+
import java.util.ArrayList;
4+
import java.util.List;
5+
import java.util.concurrent.*;
6+
7+
public class WebScraperUsingExecutorService {
8+
9+
private final ExecutorService executorService;
10+
private final WebScraper webScraper;
11+
12+
public WebScraperUsingExecutorService(int nThreads) {
13+
this.executorService = Executors.newFixedThreadPool(nThreads);
14+
this.webScraper = new WebScraper();
15+
}
16+
17+
public List<String> scrapeWebsites(List<String> urls) throws InterruptedException {
18+
List<Future<String>> futures = new ArrayList<>();
19+
for (String url : urls) {
20+
futures.add(executorService.submit(() -> webScraper.scrapeWebsite(url)));
21+
}
22+
23+
List<String> results = new ArrayList<>();
24+
for (Future<String> future : futures) {
25+
try {
26+
results.add(future.get(30, TimeUnit.SECONDS));
27+
} catch (ExecutionException | TimeoutException e) {
28+
results.add("Error: " + e.getMessage());
29+
}
30+
}
31+
32+
return results;
33+
}
34+
35+
public void shutdown() {
36+
executorService.shutdown();
37+
try {
38+
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
39+
executorService.shutdownNow();
40+
}
41+
} catch (InterruptedException e) {
42+
executorService.shutdownNow();
43+
}
44+
}
45+
46+
public static void main(String[] args) {
47+
List<String> urls = List.of(
48+
"https://www.example.com",
49+
"https://www.github.com",
50+
"https://www.stackoverflow.com",
51+
"https://www.java.com",
52+
"https://www.spring.io"
53+
);
54+
55+
WebScraperUsingExecutorService scraper = new WebScraperUsingExecutorService(3);
56+
try {
57+
List<String> results = scraper.scrapeWebsites(urls);
58+
results.forEach(System.out::println);
59+
} catch (InterruptedException e) {
60+
e.printStackTrace();
61+
} finally {
62+
scraper.shutdown();
63+
}
64+
}
65+
}

0 commit comments

Comments
 (0)