mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git synced 2025-10-06 07:32:38 +02:00

Compare commits


4 Commits

Author SHA1 Message Date
Viktor Lofgren
647dd9b12f (crawler) Reduce the likelihood of crawler tasks locking on domains before they are ready 2025-04-21 00:24:30 +02:00
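This change replaces blocking lock acquisition with a non-blocking attempt, deferring busy domains instead of parking a worker thread. A minimal sketch of the pattern, reusing the names that appear in the diff below (not the committed code itself):

Optional<DomainLocks.DomainLock> lock = domainLocks.tryLockDomain(new EdgeDomain(domain));
if (lock.isEmpty()) {
    // Another task already holds this domain; requeue and free the pool thread
    retryQueue.add(this);
    return;
}
try (DomainLocks.DomainLock domainLock = lock.get()) {
    // ... crawl the domain while holding the lock ...
}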
Viktor Lofgren
de4e2849ce (crawler) Tweak request retry counts
Increase the default number of tries to 3, but don't retry on SSL errors as they are unlikely to fix themselves in the short term.
2025-04-19 00:19:48 +02:00
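The resulting policy, sketched against Apache HttpClient 5's HttpRequestRetryStrategy interface (the full implementation appears in the HttpFetcherImpl hunk further down):

@Override
public boolean retryRequest(HttpRequest request, IOException exception, int executionCount, HttpContext context) {
    if (exception instanceof SocketTimeoutException) // timeouts are not recoverable
        return false;
    if (exception instanceof SSLException) // SSL errors are unlikely to fix themselves short term
        return false;
    return executionCount <= 3; // otherwise allow up to three tries
}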
Viktor Lofgren
3c43f1954e (crawler) Add custom cookie store implementation
Apache HttpClient's cookie implementation builds an enormous concurrent hashmap with every cookie for every domain ever crawled.  This is a big waste of resources.

Replacing it with a fairly crude domain-isolated instance, as we are primarily interested in answering whether a cookie is set, and we will never retain cookies long term.
2025-04-18 13:04:22 +02:00
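A short usage sketch of the new holder's lifecycle, assuming one DomainCookies instance per crawled domain (the class itself is added in the diff below; the URL here is a placeholder):

DomainCookies cookies = new DomainCookies();            // one instance per domain, discarded after the crawl
HttpGet request = new HttpGet("https://example.com/");  // placeholder URL
cookies.paintRequest(request);                          // attach any cookies recorded so far as a Cookie header
// execute the request, then record any Set-Cookie headers from the response:
// cookies.updateCookieStore(response);
boolean domainSetsCookies = cookies.hasCookies();       // all the crawler ultimately needs to know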
Viktor Lofgren
fa2462ec39 (crawler) Re-enable aborts on timeout 2025-04-18 12:59:34 +02:00
21 changed files with 302 additions and 199 deletions

View File

@@ -20,7 +20,6 @@ import nu.marginalia.model.crawldata.CrawledDocument;
import nu.marginalia.model.crawldata.CrawledDomain;
import nu.marginalia.model.crawldata.SerializableCrawlData;
import nu.marginalia.parquet.crawldata.CrawledDocumentParquetRecordFileWriter;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.junit.jupiter.api.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -247,7 +246,7 @@ public class CrawlingThenConvertingIntegrationTest {
private CrawledDomain crawl(CrawlerMain.CrawlSpecRecord specs, Predicate<EdgeDomain> domainBlacklist) throws Exception {
List<SerializableCrawlData> data = new ArrayList<>();
try (var recorder = new WarcRecorder(fileName, new BasicCookieStore());
try (var recorder = new WarcRecorder(fileName);
var db = new DomainStateDb(dbTempFile))
{
new CrawlerRetreiver(httpFetcher, new DomainProber(domainBlacklist), specs, db, recorder).crawlDomain();

View File

@@ -43,6 +43,7 @@ import java.nio.file.StandardCopyOption;
import java.security.Security;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@@ -65,6 +66,7 @@ public class CrawlerMain extends ProcessMainClass {
private final DomainLocks domainLocks = new DomainLocks();
private final Map<String, CrawlTask> pendingCrawlTasks = new ConcurrentHashMap<>();
private final LinkedBlockingQueue<CrawlTask> retryQueue = new LinkedBlockingQueue<>();
private final AtomicInteger tasksDone = new AtomicInteger(0);
private final HttpFetcherImpl fetcher;
@@ -277,12 +279,26 @@ public class CrawlerMain extends ProcessMainClass {
}
// Schedule viable tasks for execution until list is empty
while (!taskList.isEmpty()) {
taskList.removeIf(this::trySubmitDeferredTask);
for (int emptyRuns = 0;emptyRuns < 300;) {
boolean hasTasks = !taskList.isEmpty();
// The order of these checks is very important to avoid a race condition
// where we miss a task that is put into the retry queue
boolean hasRunningTasks = pool.getActiveCount() > 0;
boolean hasRetryTasks = !retryQueue.isEmpty();
if (hasTasks || hasRetryTasks || hasRunningTasks) {
retryQueue.drainTo(taskList);
taskList.removeIf(this::trySubmitDeferredTask);
// Add a small pause here to avoid busy looping toward the end of the execution cycle when
// we might have no new viable tasks to run for hours on end
TimeUnit.MILLISECONDS.sleep(50);
} else {
// We have no tasks to run, and no tasks in the retry queue
// but we wait a bit to see if any new tasks come in via the retry queue
emptyRuns++;
TimeUnit.SECONDS.sleep(1);
}
}
logger.info("Shutting down the pool, waiting for tasks to complete...");
@@ -425,6 +441,16 @@ public class CrawlerMain extends ProcessMainClass {
return;
}
Optional<DomainLocks.DomainLock> lock = domainLocks.tryLockDomain(new EdgeDomain(domain));
// We don't have a lock, so we can't run this task
// we return to avoid blocking the pool for too long
if (lock.isEmpty()) {
retryQueue.add(this);
return;
}
DomainLocks.DomainLock domainLock = lock.get();
try (domainLock) {
Path newWarcFile = CrawlerOutputFile.createWarcPath(outputDir, id, domain, CrawlerOutputFile.WarcFileVersion.LIVE);
Path tempFile = CrawlerOutputFile.createWarcPath(outputDir, id, domain, CrawlerOutputFile.WarcFileVersion.TEMP);
Path slopFile = CrawlerOutputFile.createSlopPath(outputDir, id, domain);
@@ -438,10 +464,9 @@ public class CrawlerMain extends ProcessMainClass {
Files.deleteIfExists(tempFile);
}
try (var warcRecorder = new WarcRecorder(newWarcFile, fetcher); // write to a temp file for now
try (var warcRecorder = new WarcRecorder(newWarcFile); // write to a temp file for now
var retriever = new CrawlerRetreiver(fetcher, domainProber, specification, domainStateDb, warcRecorder);
CrawlDataReference reference = getReference()
)
CrawlDataReference reference = getReference())
{
// Resume the crawl if it was aborted
if (Files.exists(tempFile)) {
@@ -451,10 +476,7 @@ public class CrawlerMain extends ProcessMainClass {
DomainLinks domainLinks = anchorTagsSource.getAnchorTags(domain);
int size;
try (var lock = domainLocks.lockDomain(new EdgeDomain(domain))) {
size = retriever.crawlDomain(domainLinks, reference);
}
int size = retriever.crawlDomain(domainLinks, reference);
// Delete the reference crawl data if it's not the same as the new one
// (mostly a case when migrating from legacy->warc)
@@ -487,6 +509,7 @@ public class CrawlerMain extends ProcessMainClass {
Files.deleteIfExists(tempFile);
}
}
}
private CrawlDataReference getReference() {
try {

View File

@@ -1,34 +0,0 @@
package nu.marginalia.crawl.fetcher;
import java.io.IOException;
import java.net.CookieHandler;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Cookies extends CookieHandler {
final ThreadLocal<ConcurrentHashMap<String, List<String>>> cookieJar = ThreadLocal.withInitial(ConcurrentHashMap::new);
public void clear() {
cookieJar.get().clear();
}
public boolean hasCookies() {
return !cookieJar.get().isEmpty();
}
public List<String> getCookies() {
return cookieJar.get().values().stream().flatMap(List::stream).toList();
}
@Override
public Map<String, List<String>> get(URI uri, Map<String, List<String>> requestHeaders) throws IOException {
return cookieJar.get();
}
@Override
public void put(URI uri, Map<String, List<String>> responseHeaders) throws IOException {
cookieJar.get().putAll(responseHeaders);
}
}

View File

@@ -0,0 +1,56 @@
package nu.marginalia.crawl.fetcher;
import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.HttpResponse;
import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;
public class DomainCookies {
private final Map<String, String> cookies = new HashMap<>();
public boolean hasCookies() {
return !cookies.isEmpty();
}
public void updateCookieStore(HttpResponse response) {
for (var header : response.getHeaders()) {
if (header.getName().equalsIgnoreCase("Set-Cookie")) {
parseCookieHeader(header.getValue());
}
}
}
private void parseCookieHeader(String value) {
// Parse the Set-Cookie header value and extract the cookies
String[] parts = value.split(";");
String cookie = parts[0].trim();
if (cookie.contains("=")) {
String[] cookieParts = cookie.split("=");
String name = cookieParts[0].trim();
String val = cookieParts[1].trim();
cookies.put(name, val);
}
}
public void paintRequest(HttpUriRequestBase request) {
request.addHeader("Cookie", createCookieHeader());
}
public void paintRequest(ClassicHttpRequest request) {
request.addHeader("Cookie", createCookieHeader());
}
private String createCookieHeader() {
StringJoiner sj = new StringJoiner("; ");
for (var cookie : cookies.entrySet()) {
sj.add(cookie.getKey() + "=" + cookie.getValue());
}
return sj.toString();
}
}

View File

@@ -23,6 +23,7 @@ public interface HttpFetcher extends AutoCloseable {
HttpFetchResult fetchContent(EdgeUrl url,
WarcRecorder recorder,
DomainCookies cookies,
CrawlDelayTimer timer,
ContentTags tags,
ProbeType probeType);

View File

@@ -47,6 +47,7 @@ import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLException;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.URISyntaxException;
@@ -307,6 +308,7 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
* recorded in the WARC file on failure.
*/
public ContentTypeProbeResult probeContentType(EdgeUrl url,
DomainCookies cookies,
CrawlDelayTimer timer,
ContentTags tags) {
if (!tags.isEmpty() || !contentTypeLogic.isUrlLikeBinary(url)) {
@@ -319,9 +321,11 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
.addHeader("Accept-Encoding", "gzip")
.build();
var result = SendLock.wrapSend(client, head, (rsp) -> {
EntityUtils.consume(rsp.getEntity());
cookies.paintRequest(head);
return SendLock.wrapSend(client, head, (rsp) -> {
cookies.updateCookieStore(rsp);
EntityUtils.consume(rsp.getEntity());
int statusCode = rsp.getCode();
// Handle redirects
@@ -359,8 +363,6 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
return new ContentTypeProbeResult.BadContentType(contentType, statusCode);
}
});
return result;
}
catch (SocketTimeoutException ex) {
@@ -382,6 +384,7 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
@Override
public HttpFetchResult fetchContent(EdgeUrl url,
WarcRecorder warcRecorder,
DomainCookies cookies,
CrawlDelayTimer timer,
ContentTags contentTags,
ProbeType probeType)
@@ -389,7 +392,7 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
try {
if (probeType == HttpFetcher.ProbeType.FULL) {
try {
var probeResult = probeContentType(url, timer, contentTags);
var probeResult = probeContentType(url, cookies, timer, contentTags);
logger.info(crawlerAuditMarker, "Probe result {} for {}", probeResult.getClass().getSimpleName(), url);
switch (probeResult) {
case HttpFetcher.ContentTypeProbeResult.NoOp():
@@ -427,7 +430,7 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
contentTags.paint(request);
try (var sl = new SendLock()) {
HttpFetchResult result = warcRecorder.fetch(client, request);
HttpFetchResult result = warcRecorder.fetch(client, cookies, request);
if (result instanceof HttpFetchResult.ResultOk ok) {
if (ok.statusCode() == 304) {
@@ -604,7 +607,7 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
request.addHeader("Accept-Encoding", "gzip");
request.addHeader("Accept", "text/*, */*;q=0.9");
HttpFetchResult result = recorder.fetch(client, request);
HttpFetchResult result = recorder.fetch(client, new DomainCookies(), request);
return DocumentBodyExtractor.asBytes(result).mapOpt((contentType, body) ->
robotsParser.parseContent(url.toString(),
@@ -620,18 +623,21 @@ public class HttpFetcherImpl implements HttpFetcher, HttpRequestRetryStrategy {
@Override
public boolean retryRequest(HttpRequest request, IOException exception, int executionCount, HttpContext context) {
if (exception instanceof SocketTimeoutException ex) {
if (exception instanceof SocketTimeoutException) { // Timeouts are not recoverable
return false;
}
if (exception instanceof SSLException) { // SSL exceptions are unlikely to be recoverable
return false;
}
return executionCount < 3;
return executionCount <= 3;
}
@Override
public boolean retryRequest(HttpResponse response, int executionCount, HttpContext context) {
return switch (response.getCode()) {
case 500, 503 -> executionCount < 2;
case 429 -> executionCount < 3;
case 500, 503 -> executionCount <= 2;
case 429 -> executionCount <= 3;
default -> false;
};
}

View File

@@ -106,8 +106,7 @@ public abstract class WarcInputBuffer implements AutoCloseable {
// so we don't keep the connection open forever or are forced to consume
// the stream to the end
// FIXME: Disable this for now, as it may cause issues with the connection pool
// request.abort();
request.abort();
break;
}

View File

@@ -1,6 +1,7 @@
package nu.marginalia.crawl.fetcher.warc;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.link_parser.LinkParser;
@@ -9,8 +10,6 @@ import nu.marginalia.model.EdgeUrl;
import nu.marginalia.model.body.HttpFetchResult;
import org.apache.hc.client5.http.classic.HttpClient;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.apache.hc.client5.http.cookie.CookieStore;
import org.apache.hc.core5.http.NameValuePair;
import org.jetbrains.annotations.Nullable;
import org.netpreserve.jwarc.*;
@@ -53,23 +52,15 @@ public class WarcRecorder implements AutoCloseable {
// Affix a version string in case we need to change the format in the future
// in some way
private final String warcRecorderVersion = "1.0";
private final CookieStore cookies;
private final LinkParser linkParser = new LinkParser();
/**
* Create a new WarcRecorder that will write to the given file
*
* @param warcFile The file to write to
*/
public WarcRecorder(Path warcFile, HttpFetcherImpl fetcher) throws IOException {
public WarcRecorder(Path warcFile) throws IOException {
this.warcFile = warcFile;
this.writer = new WarcWriter(warcFile);
this.cookies = fetcher.getCookies();
}
public WarcRecorder(Path warcFile, CookieStore cookies) throws IOException {
this.warcFile = warcFile;
this.writer = new WarcWriter(warcFile);
this.cookies = cookies;
}
/**
@@ -79,23 +70,20 @@ public class WarcRecorder implements AutoCloseable {
public WarcRecorder() throws IOException {
this.warcFile = Files.createTempFile("warc", ".warc.gz");
this.writer = new WarcWriter(this.warcFile);
this.cookies = new BasicCookieStore();
temporaryFile = true;
}
private boolean hasCookies() {
return !cookies.getCookies().isEmpty();
}
public HttpFetchResult fetch(HttpClient client,
DomainCookies cookies,
HttpGet request)
throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException
{
return fetch(client, request, Duration.ofMillis(MAX_TIME));
return fetch(client, cookies, request, Duration.ofMillis(MAX_TIME));
}
public HttpFetchResult fetch(HttpClient client,
DomainCookies cookies,
HttpGet request,
Duration timeout)
throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException
@@ -113,13 +101,15 @@ public class WarcRecorder implements AutoCloseable {
// Inject a range header to attempt to limit the size of the response
// to the maximum size we want to store, if the server supports it.
request.addHeader("Range", "bytes=0-"+MAX_SIZE);
cookies.paintRequest(request);
try {
return client.execute(request,response -> {
try (WarcInputBuffer inputBuffer = WarcInputBuffer.forResponse(response, request, timeout);
InputStream inputStream = inputBuffer.read()) {
cookies.updateCookieStore(response);
// Build and write the request
WarcDigestBuilder requestDigestBuilder = new WarcDigestBuilder();
@@ -143,8 +133,9 @@ public class WarcRecorder implements AutoCloseable {
warcRequest.http(); // force HTTP header to be parsed before body is consumed so that caller can use it
writer.write(warcRequest);
if (hasCookies()) {
extraHeaders.put("X-Has-Cookies", List.of("1"));
if (cookies.hasCookies()) {
response.addHeader("X-Has-Cookies", 1);
}
byte[] responseHeaders = WarcProtocolReconstructor.getResponseHeader(response, inputBuffer.size()).getBytes(StandardCharsets.UTF_8);
@@ -259,7 +250,7 @@ public class WarcRecorder implements AutoCloseable {
writer.write(item);
}
private void saveOldResponse(EdgeUrl url, String contentType, int statusCode, byte[] documentBody, @Nullable String headers, ContentTags contentTags) {
private void saveOldResponse(EdgeUrl url, DomainCookies domainCookies, String contentType, int statusCode, byte[] documentBody, @Nullable String headers, ContentTags contentTags) {
try {
WarcDigestBuilder responseDigestBuilder = new WarcDigestBuilder();
WarcDigestBuilder payloadDigestBuilder = new WarcDigestBuilder();
@@ -320,7 +311,7 @@ public class WarcRecorder implements AutoCloseable {
.date(Instant.now())
.body(MediaType.HTTP_RESPONSE, responseDataBuffer.copyBytes());
if (hasCookies()) {
if (domainCookies.hasCookies() || (headers != null && headers.contains("Set-Cookie:"))) {
builder.addHeader("X-Has-Cookies", "1");
}
@@ -340,8 +331,8 @@ public class WarcRecorder implements AutoCloseable {
* an E-Tag or Last-Modified header, and the server responds with a 304 Not Modified. In this
* scenario we want to record the data as it was in the previous crawl, but not re-fetch it.
*/
public void writeReferenceCopy(EdgeUrl url, String contentType, int statusCode, byte[] documentBody, @Nullable String headers, ContentTags ctags) {
saveOldResponse(url, contentType, statusCode, documentBody, headers, ctags);
public void writeReferenceCopy(EdgeUrl url, DomainCookies cookies, String contentType, int statusCode, byte[] documentBody, @Nullable String headers, ContentTags ctags) {
saveOldResponse(url, cookies, contentType, statusCode, documentBody, headers, ctags);
}
public void writeWarcinfoHeader(String ip, EdgeDomain domain, HttpFetcherImpl.DomainProbeResult result) throws IOException {

View File

@@ -3,6 +3,7 @@ package nu.marginalia.crawl.logic;
import nu.marginalia.model.EdgeDomain;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Semaphore;
@@ -19,8 +20,21 @@ public class DomainLocks {
* and may be held by another thread. The caller is responsible for locking and releasing the lock.
*/
public DomainLock lockDomain(EdgeDomain domain) throws InterruptedException {
return new DomainLock(domain.toString(),
var ret = new DomainLock(domain.toString(),
locks.computeIfAbsent(domain.topDomain.toLowerCase(), this::defaultPermits));
ret.lock();
return ret;
}
public Optional<DomainLock> tryLockDomain(EdgeDomain domain) {
var sem = locks.computeIfAbsent(domain.topDomain.toLowerCase(), this::defaultPermits);
if (sem.tryAcquire(1)) {
return Optional.of(new DomainLock(domain.toString(), sem));
}
else {
// We don't have a lock, so we return an empty optional
return Optional.empty();
}
}
private Semaphore defaultPermits(String topDomain) {
@@ -56,10 +70,13 @@ public class DomainLocks {
private final String domainName;
private final Semaphore semaphore;
DomainLock(String domainName, Semaphore semaphore) throws InterruptedException {
DomainLock(String domainName, Semaphore semaphore) {
this.domainName = domainName;
this.semaphore = semaphore;
}
// This method is called to lock the domain. It will block until the lock is available.
private void lock() throws InterruptedException {
Thread.currentThread().setName("crawling:" + domainName + " [await domain lock]");
semaphore.acquire();
Thread.currentThread().setName("crawling:" + domainName);

View File

@@ -6,6 +6,7 @@ import nu.marginalia.contenttype.ContentType;
import nu.marginalia.crawl.CrawlerMain;
import nu.marginalia.crawl.DomainStateDb;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.crawl.logic.LinkFilterSelector;
@@ -51,6 +52,7 @@ public class CrawlerRetreiver implements AutoCloseable {
private final DomainStateDb domainStateDb;
private final WarcRecorder warcRecorder;
private final CrawlerRevisitor crawlerRevisitor;
private final DomainCookies cookies = new DomainCookies();
private static final CrawlerConnectionThrottle connectionThrottle = new CrawlerConnectionThrottle(
Duration.ofSeconds(1) // pace the connections to avoid network congestion at startup
@@ -124,7 +126,7 @@ public class CrawlerRetreiver implements AutoCloseable {
}
Instant recrawlStart = Instant.now();
CrawlerRevisitor.RecrawlMetadata recrawlMetadata = crawlerRevisitor.recrawl(oldCrawlData, robotsRules, delayTimer);
CrawlerRevisitor.RecrawlMetadata recrawlMetadata = crawlerRevisitor.recrawl(oldCrawlData, cookies, robotsRules, delayTimer);
Duration recrawlTime = Duration.between(recrawlStart, Instant.now());
// Play back the old crawl data (if present) and fetch the documents comparing etags and last-modified
@@ -274,7 +276,7 @@ public class CrawlerRetreiver implements AutoCloseable {
try {
var url = rootUrl.withPathAndParam("/", null);
HttpFetchResult result = fetcher.fetchContent(url, warcRecorder, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
HttpFetchResult result = fetcher.fetchContent(url, warcRecorder, cookies, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
timer.waitFetchDelay(0);
if (result instanceof HttpFetchResult.ResultRedirect(EdgeUrl location)) {
@@ -337,7 +339,7 @@ public class CrawlerRetreiver implements AutoCloseable {
// Grab the favicon if it exists
if (fetcher.fetchContent(faviconUrl, warcRecorder, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED) instanceof HttpFetchResult.ResultOk iconResult) {
if (fetcher.fetchContent(faviconUrl, warcRecorder, cookies, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED) instanceof HttpFetchResult.ResultOk iconResult) {
String contentType = iconResult.header("Content-Type");
byte[] iconData = iconResult.getBodyBytes();
@@ -407,7 +409,7 @@ public class CrawlerRetreiver implements AutoCloseable {
if (parsedOpt.isEmpty())
return false;
HttpFetchResult result = fetcher.fetchContent(parsedOpt.get(), warcRecorder, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
HttpFetchResult result = fetcher.fetchContent(parsedOpt.get(), warcRecorder, cookies, timer, ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
timer.waitFetchDelay(0);
if (!(result instanceof HttpFetchResult.ResultOk ok)) {
@@ -435,7 +437,7 @@ public class CrawlerRetreiver implements AutoCloseable {
{
var contentTags = reference.getContentTags();
HttpFetchResult fetchedDoc = fetcher.fetchContent(top, warcRecorder, timer, contentTags, HttpFetcher.ProbeType.FULL);
HttpFetchResult fetchedDoc = fetcher.fetchContent(top, warcRecorder, cookies, timer, contentTags, HttpFetcher.ProbeType.FULL);
timer.waitFetchDelay();
if (Thread.interrupted()) {
@@ -461,7 +463,7 @@ public class CrawlerRetreiver implements AutoCloseable {
{
var doc = reference.doc();
warcRecorder.writeReferenceCopy(top, doc.contentType, doc.httpStatus, doc.documentBodyBytes, doc.headers, contentTags);
warcRecorder.writeReferenceCopy(top, cookies, doc.contentType, doc.httpStatus, doc.documentBodyBytes, doc.headers, contentTags);
fetchedDoc = new HttpFetchResult.Result304ReplacedWithReference(doc.url,
new ContentType(doc.contentType, "UTF-8"),

View File

@@ -2,6 +2,7 @@ package nu.marginalia.crawl.retreival.revisit;
import crawlercommons.robots.SimpleRobotRules;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.crawl.retreival.CrawlDataReference;
import nu.marginalia.crawl.retreival.CrawlDelayTimer;
@@ -37,6 +38,7 @@ public class CrawlerRevisitor {
/** Performs a re-crawl of old documents, comparing etags and last-modified */
public RecrawlMetadata recrawl(CrawlDataReference oldCrawlData,
DomainCookies cookies,
SimpleRobotRules robotsRules,
CrawlDelayTimer delayTimer)
throws InterruptedException {
@@ -132,6 +134,7 @@ public class CrawlerRevisitor {
}
// Add a WARC record so we don't repeat this
warcRecorder.writeReferenceCopy(url,
cookies,
doc.contentType,
doc.httpStatus,
doc.documentBodyBytes,

View File

@@ -96,50 +96,50 @@ class HttpFetcherImplContentTypeProbeTest {
@Test
public void testProbeContentTypeHtmlShortcircuitPath() throws URISyntaxException {
var result = fetcher.probeContentType(new EdgeUrl("https://localhost/test.html"), new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(new EdgeUrl("https://localhost/test.html"), new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertInstanceOf(HttpFetcher.ContentTypeProbeResult.NoOp.class, result);
}
@Test
public void testProbeContentTypeHtmlShortcircuitTags() {
var result = fetcher.probeContentType(contentTypeBinaryUrl, new CrawlDelayTimer(50), new ContentTags("a", "b"));
var result = fetcher.probeContentType(contentTypeBinaryUrl, new DomainCookies(), new CrawlDelayTimer(50), new ContentTags("a", "b"));
Assertions.assertInstanceOf(HttpFetcher.ContentTypeProbeResult.NoOp.class, result);
}
@Test
public void testProbeContentTypeHtml() {
var result = fetcher.probeContentType(contentTypeHtmlUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(contentTypeHtmlUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertEquals(new HttpFetcher.ContentTypeProbeResult.Ok(contentTypeHtmlUrl), result);
}
@Test
public void testProbeContentTypeBinary() {
var result = fetcher.probeContentType(contentTypeBinaryUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(contentTypeBinaryUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertEquals(new HttpFetcher.ContentTypeProbeResult.BadContentType("application/octet-stream", 200), result);
}
@Test
public void testProbeContentTypeRedirect() {
var result = fetcher.probeContentType(redirectUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(redirectUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertEquals(new HttpFetcher.ContentTypeProbeResult.Redirect(contentTypeHtmlUrl), result);
}
@Test
public void testProbeContentTypeBadHttpStatus() {
var result = fetcher.probeContentType(badHttpStatusUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(badHttpStatusUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertEquals(new HttpFetcher.ContentTypeProbeResult.HttpError(500, "Bad status code"), result);
}
@Test
public void testOnlyGetAllowed() {
var result = fetcher.probeContentType(onlyGetAllowedUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(onlyGetAllowedUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertEquals(new HttpFetcher.ContentTypeProbeResult.Ok(onlyGetAllowedUrl), result);
}
@Test
public void testTimeout() {
var result = fetcher.probeContentType(timeoutUrl, new CrawlDelayTimer(50), ContentTags.empty());
var result = fetcher.probeContentType(timeoutUrl, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
Assertions.assertInstanceOf(HttpFetcher.ContentTypeProbeResult.Timeout.class, result);
}

View File

@@ -31,6 +31,7 @@ class HttpFetcherImplFetchTest {
private static String lastModified = "Wed, 21 Oct 2024 07:28:00 GMT";
private static EdgeUrl okUrl;
private static EdgeUrl okUrlSetsCookie;
private static EdgeUrl okRangeResponseUrl;
private static EdgeUrl okUrlWith304;
@@ -88,6 +89,19 @@ class HttpFetcherImplFetchTest {
.withStatus(200)
.withBody("Hello World")));
okUrlSetsCookie = new EdgeUrl("http://localhost:18089/okSetCookie.bin");
wireMockServer.stubFor(WireMock.head(WireMock.urlEqualTo(okUrlSetsCookie.path))
.willReturn(WireMock.aResponse()
.withHeader("Content-Type", "text/html")
.withHeader("Set-Cookie", "test=1")
.withStatus(200)));
wireMockServer.stubFor(WireMock.get(WireMock.urlEqualTo(okUrlSetsCookie.path))
.willReturn(WireMock.aResponse()
.withHeader("Content-Type", "text/html")
.withHeader("Set-Cookie", "test=1")
.withStatus(200)
.withBody("Hello World")));
okUrlWith304 = new EdgeUrl("http://localhost:18089/ok304.bin");
wireMockServer.stubFor(WireMock.head(WireMock.urlEqualTo(okUrlWith304.path))
.willReturn(WireMock.aResponse()
@@ -117,6 +131,8 @@ class HttpFetcherImplFetchTest {
.withHeader("Keep-Alive", "max=4, timeout=30")
.withBody("Hello")
));
wireMockServer.start();
}
@@ -134,7 +150,7 @@ class HttpFetcherImplFetchTest {
public void setUp() throws IOException {
fetcher = new HttpFetcherImpl(new UserAgent("test.marginalia.nu", "test.marginalia.nu"));
warcFile = Files.createTempFile(getClass().getSimpleName(), ".warc");
warcRecorder = new WarcRecorder(warcFile, fetcher);
warcRecorder = new WarcRecorder(warcFile);
}
@AfterEach
@@ -158,7 +174,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testOk_NoProbe() throws IOException {
var result = fetcher.fetchContent(okUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(okUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertTrue(result.isOk());
@@ -169,12 +185,29 @@ class HttpFetcherImplFetchTest {
Assertions.assertInstanceOf(WarcResponse.class, warcRecords.get(1));
WarcResponse response = (WarcResponse) warcRecords.get(1);
assertEquals("0", response.headers().first("X-Has-Cookies").orElse("0"));
assertEquals("0", response.http().headers().first("X-Has-Cookies").orElse("0"));
}
@Test
public void testOkSetsCookie() throws IOException {
var cookies = new DomainCookies();
var result = fetcher.fetchContent(okUrlSetsCookie, warcRecorder, cookies, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertTrue(result.isOk());
List<WarcRecord> warcRecords = getWarcRecords();
assertEquals(2, warcRecords.size());
Assertions.assertInstanceOf(WarcRequest.class, warcRecords.get(0));
Assertions.assertInstanceOf(WarcResponse.class, warcRecords.get(1));
WarcResponse response = (WarcResponse) warcRecords.get(1);
assertEquals("1", response.http().headers().first("X-Has-Cookies").orElse("0"));
}
@Test
public void testOk_FullProbe() {
var result = fetcher.fetchContent(okUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(okUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertTrue(result.isOk());
@@ -182,7 +215,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testOk304_NoProbe() {
var result = fetcher.fetchContent(okUrlWith304, warcRecorder, new CrawlDelayTimer(1000), new ContentTags(etag, lastModified), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(okUrlWith304, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), new ContentTags(etag, lastModified), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.Result304Raw.class, result);
System.out.println(result);
@@ -191,7 +224,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testOk304_FullProbe() {
var result = fetcher.fetchContent(okUrlWith304, warcRecorder, new CrawlDelayTimer(1000), new ContentTags(etag, lastModified), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(okUrlWith304, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), new ContentTags(etag, lastModified), HttpFetcher.ProbeType.FULL);
Assertions.assertInstanceOf(HttpFetchResult.Result304Raw.class, result);
System.out.println(result);
@@ -199,7 +232,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testBadStatus_NoProbe() throws IOException {
var result = fetcher.fetchContent(badHttpStatusUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(badHttpStatusUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertFalse(result.isOk());
@@ -213,7 +246,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testBadStatus_FullProbe() {
var result = fetcher.fetchContent(badHttpStatusUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(badHttpStatusUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertFalse(result.isOk());
@@ -223,7 +256,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testRedirect_NoProbe() throws URISyntaxException, IOException {
var result = fetcher.fetchContent(redirectUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(redirectUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultRedirect.class, result);
assertEquals(new EdgeUrl("http://localhost:18089/test.html.bin"), ((HttpFetchResult.ResultRedirect) result).url());
@@ -236,7 +269,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testRedirect_FullProbe() throws URISyntaxException {
var result = fetcher.fetchContent(redirectUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(redirectUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
Assertions.assertInstanceOf(HttpFetchResult.ResultRedirect.class, result);
assertEquals(new EdgeUrl("http://localhost:18089/test.html.bin"), ((HttpFetchResult.ResultRedirect) result).url());
@@ -249,7 +282,7 @@ class HttpFetcherImplFetchTest {
public void testFetchTimeout_NoProbe() throws IOException, URISyntaxException {
Instant requestStart = Instant.now();
var result = fetcher.fetchContent(timeoutUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(timeoutUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultException.class, result);
@@ -273,7 +306,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testRangeResponse() throws IOException {
var result = fetcher.fetchContent(okRangeResponseUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(okRangeResponseUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertTrue(result.isOk());
@@ -290,7 +323,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testFetchTimeout_Probe() throws IOException, URISyntaxException {
Instant requestStart = Instant.now();
var result = fetcher.fetchContent(timeoutUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(timeoutUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
Instant requestEnd = Instant.now();
Assertions.assertInstanceOf(HttpFetchResult.ResultException.class, result);
@@ -313,7 +346,7 @@ class HttpFetcherImplFetchTest {
@Test
public void testKeepaliveUrl() {
// mostly for smoke testing and debugger utility
var result = fetcher.fetchContent(keepAliveUrl, warcRecorder, new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
var result = fetcher.fetchContent(keepAliveUrl, warcRecorder, new DomainCookies(), new CrawlDelayTimer(1000), ContentTags.empty(), HttpFetcher.ProbeType.DISABLED);
Assertions.assertInstanceOf(HttpFetchResult.ResultOk.class, result);
Assertions.assertTrue(result.isOk());
@@ -330,6 +363,13 @@ class HttpFetcherImplFetchTest {
WarcXEntityRefused.register(reader);
for (var record : reader) {
// Load the body, we need to do this before we close the reader to have access to the content.
if (record instanceof WarcRequest req) {
req.http();
} else if (record instanceof WarcResponse rsp) {
rsp.http();
}
records.add(record);
}
}

View File

@@ -1,11 +1,11 @@
package nu.marginalia.crawl.retreival;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.model.EdgeDomain;
import nu.marginalia.model.EdgeUrl;
import org.apache.hc.client5.http.classic.HttpClient;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -45,7 +45,7 @@ class CrawlerWarcResynchronizerTest {
@Test
void run() throws IOException, URISyntaxException {
try (var oldRecorder = new WarcRecorder(fileName, new BasicCookieStore())) {
try (var oldRecorder = new WarcRecorder(fileName)) {
fetchUrl(oldRecorder, "https://www.marginalia.nu/");
fetchUrl(oldRecorder, "https://www.marginalia.nu/log/");
fetchUrl(oldRecorder, "https://www.marginalia.nu/feed/");
@@ -55,7 +55,7 @@ class CrawlerWarcResynchronizerTest {
var crawlFrontier = new DomainCrawlFrontier(new EdgeDomain("www.marginalia.nu"), List.of(), 100);
try (var newRecorder = new WarcRecorder(outputFile, new BasicCookieStore())) {
try (var newRecorder = new WarcRecorder(outputFile)) {
new CrawlerWarcResynchronizer(crawlFrontier, newRecorder).run(fileName);
}
@@ -82,6 +82,6 @@ class CrawlerWarcResynchronizerTest {
request.addHeader("User-agent", "test.marginalia.nu");
request.addHeader("Accept-Encoding", "gzip");
recorder.fetch(httpClient, request);
recorder.fetch(httpClient, new DomainCookies(), request);
}
}

View File

@@ -2,6 +2,7 @@ package nu.marginalia.crawl.retreival.fetcher;
import com.sun.net.httpserver.HttpServer;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.crawl.retreival.CrawlDelayTimer;
@@ -88,7 +89,7 @@ class ContentTypeProberTest {
@Test
void probeContentTypeOk() throws Exception {
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(htmlEndpoint, new CrawlDelayTimer(50), ContentTags.empty());
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(htmlEndpoint, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
System.out.println(result);
@@ -97,7 +98,7 @@ class ContentTypeProberTest {
@Test
void probeContentTypeRedir() throws Exception {
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(htmlRedirEndpoint, new CrawlDelayTimer(50), ContentTags.empty());
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(htmlRedirEndpoint, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
System.out.println(result);
@@ -106,7 +107,7 @@ class ContentTypeProberTest {
@Test
void probeContentTypeBad() throws Exception {
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(binaryEndpoint, new CrawlDelayTimer(50), ContentTags.empty());
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(binaryEndpoint, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
System.out.println(result);
@@ -115,7 +116,7 @@ class ContentTypeProberTest {
@Test
void probeContentTypeTimeout() throws Exception {
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(timeoutEndpoint, new CrawlDelayTimer(50), ContentTags.empty());
HttpFetcher.ContentTypeProbeResult result = fetcher.probeContentType(timeoutEndpoint, new DomainCookies(), new CrawlDelayTimer(50), ContentTags.empty());
System.out.println(result);

View File

@@ -1,10 +1,10 @@
package nu.marginalia.crawl.retreival.fetcher;
import com.sun.net.httpserver.HttpServer;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import org.apache.hc.client5.http.classic.HttpClient;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.junit.jupiter.api.*;
import org.netpreserve.jwarc.WarcReader;
@@ -89,7 +89,7 @@ class WarcRecorderFakeServerTest {
fileNameWarc = Files.createTempFile("test", ".warc");
fileNameParquet = Files.createTempFile("test", ".parquet");
client = new WarcRecorder(fileNameWarc, new BasicCookieStore());
client = new WarcRecorder(fileNameWarc);
}
@AfterEach
@@ -104,7 +104,7 @@ class WarcRecorderFakeServerTest {
HttpGet request = new HttpGet("http://localhost:14510/fast");
request.addHeader("User-agent", "test.marginalia.nu");
request.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient, request);
client.fetch(httpClient, new DomainCookies(), request);
Map<String, String> sampleData = new HashMap<>();
try (var warcReader = new WarcReader(fileNameWarc)) {
@@ -130,6 +130,7 @@ class WarcRecorderFakeServerTest {
request.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient,
new DomainCookies(),
request,
Duration.ofSeconds(1)
);

View File

@@ -2,13 +2,13 @@ package nu.marginalia.crawl.retreival.fetcher;
import nu.marginalia.UserAgent;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.model.EdgeUrl;
import nu.marginalia.parquet.crawldata.CrawledDocumentParquetRecordFileReader;
import nu.marginalia.parquet.crawldata.CrawledDocumentParquetRecordFileWriter;
import org.apache.hc.client5.http.classic.HttpClient;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.apache.hc.client5.http.impl.classic.HttpClients;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -41,7 +41,7 @@ class WarcRecorderTest {
fileNameWarc = Files.createTempFile("test", ".warc");
fileNameParquet = Files.createTempFile("test", ".parquet");
client = new WarcRecorder(fileNameWarc, new BasicCookieStore());
client = new WarcRecorder(fileNameWarc);
}
@AfterEach
@@ -56,11 +56,8 @@ class WarcRecorderTest {
HttpGet request = new HttpGet("https://www.marginalia.nu/");
request.addHeader("User-agent", "test.marginalia.nu");
request.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient, request);
client.fetch(httpClient,
request
);
client.fetch(httpClient, new DomainCookies(), request);
Map<String, String> sampleData = new HashMap<>();
try (var warcReader = new WarcReader(fileNameWarc)) {
@@ -81,8 +78,9 @@ class WarcRecorderTest {
@Test
public void flagAsSkipped() throws IOException, URISyntaxException {
try (var recorder = new WarcRecorder(fileNameWarc, new BasicCookieStore())) {
try (var recorder = new WarcRecorder(fileNameWarc)) {
recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
new DomainCookies(),
"text/html",
200,
"<?doctype html><html><body>test</body></html>".getBytes(),
@@ -105,8 +103,9 @@ class WarcRecorderTest {
@Test
public void flagAsSkippedNullBody() throws IOException, URISyntaxException {
try (var recorder = new WarcRecorder(fileNameWarc, new BasicCookieStore())) {
try (var recorder = new WarcRecorder(fileNameWarc)) {
recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
new DomainCookies(),
"text/html",
200,
null,
@@ -117,8 +116,9 @@ class WarcRecorderTest {
@Test
public void testSaveImport() throws URISyntaxException, IOException {
try (var recorder = new WarcRecorder(fileNameWarc, new BasicCookieStore())) {
try (var recorder = new WarcRecorder(fileNameWarc)) {
recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
new DomainCookies(),
"text/html",
200,
"<?doctype html><html><body>test</body></html>".getBytes(),
@@ -145,19 +145,19 @@ class WarcRecorderTest {
request1.addHeader("User-agent", "test.marginalia.nu");
request1.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient, request1);
client.fetch(httpClient, new DomainCookies(), request1);
HttpGet request2 = new HttpGet("https://www.marginalia.nu/log/");
request2.addHeader("User-agent", "test.marginalia.nu");
request2.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient, request2);
client.fetch(httpClient, new DomainCookies(), request2);
HttpGet request3 = new HttpGet("https://www.marginalia.nu/sanic.png");
request3.addHeader("User-agent", "test.marginalia.nu");
request3.addHeader("Accept-Encoding", "gzip");
client.fetch(httpClient, request3);
client.fetch(httpClient, new DomainCookies(), request3);
CrawledDocumentParquetRecordFileWriter.convertWarc(
"www.marginalia.nu",

View File

@@ -1,6 +1,7 @@
package nu.marginalia.crawling;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
@@ -31,7 +32,7 @@ class HttpFetcherTest {
void fetchUTF8() throws Exception {
var fetcher = new HttpFetcherImpl("nu.marginalia.edge-crawler");
try (var recorder = new WarcRecorder()) {
var result = fetcher.fetchContent(new EdgeUrl("https://www.marginalia.nu"), recorder, new CrawlDelayTimer(100), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(new EdgeUrl("https://www.marginalia.nu"), recorder, new DomainCookies(), new CrawlDelayTimer(100), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
if (DocumentBodyExtractor.asString(result) instanceof DocumentBodyResult.Ok bodyOk) {
System.out.println(bodyOk.contentType());
}
@@ -49,7 +50,7 @@ class HttpFetcherTest {
var fetcher = new HttpFetcherImpl("nu.marginalia.edge-crawler");
try (var recorder = new WarcRecorder()) {
var result = fetcher.fetchContent(new EdgeUrl("https://www.marginalia.nu/robots.txt"), recorder, new CrawlDelayTimer(100), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
var result = fetcher.fetchContent(new EdgeUrl("https://www.marginalia.nu/robots.txt"), recorder, new DomainCookies(), new CrawlDelayTimer(100), ContentTags.empty(), HttpFetcher.ProbeType.FULL);
if (DocumentBodyExtractor.asString(result) instanceof DocumentBodyResult.Ok bodyOk) {
System.out.println(bodyOk.contentType());
}

View File

@@ -3,10 +3,7 @@ package nu.marginalia.crawling.retreival;
import crawlercommons.robots.SimpleRobotRules;
import nu.marginalia.crawl.CrawlerMain;
import nu.marginalia.crawl.DomainStateDb;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.crawl.fetcher.SitemapRetriever;
import nu.marginalia.crawl.fetcher.*;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.crawl.retreival.CrawlDelayTimer;
import nu.marginalia.crawl.retreival.CrawlerRetreiver;
@@ -137,7 +134,7 @@ public class CrawlerMockFetcherTest {
}
@Override
public HttpFetchResult fetchContent(EdgeUrl url, WarcRecorder recorder, CrawlDelayTimer timer, ContentTags tags, ProbeType probeType) {
public HttpFetchResult fetchContent(EdgeUrl url, WarcRecorder recorder, DomainCookies cookies, CrawlDelayTimer timer, ContentTags tags, ProbeType probeType) {
logger.info("Fetching {}", url);
if (mockData.containsKey(url)) {
byte[] bodyBytes = mockData.get(url).documentBodyBytes;

View File

@@ -16,7 +16,6 @@ import nu.marginalia.model.crawldata.CrawledDocument;
import nu.marginalia.model.crawldata.CrawledDomain;
import nu.marginalia.model.crawldata.SerializableCrawlData;
import nu.marginalia.slop.SlopCrawlDataRecord;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.*;
import org.netpreserve.jwarc.*;
@@ -180,7 +179,7 @@ class CrawlerRetreiverTest {
new EdgeDomain("www.marginalia.nu"),
List.of(), 100);
var resync = new CrawlerWarcResynchronizer(revisitCrawlFrontier,
new WarcRecorder(tempFileWarc2, new BasicCookieStore())
new WarcRecorder(tempFileWarc2)
);
// truncate the size of the file to simulate a crash
@@ -456,7 +455,7 @@ class CrawlerRetreiverTest {
List.of(), 100);
var resync = new CrawlerWarcResynchronizer(revisitCrawlFrontier,
new WarcRecorder(tempFileWarc3, new BasicCookieStore())
new WarcRecorder(tempFileWarc3)
);
// truncate the size of the file to simulate a crash
@@ -507,7 +506,7 @@ class CrawlerRetreiverTest {
}
private void doCrawlWithReferenceStream(CrawlerMain.CrawlSpecRecord specs, CrawlDataReference reference) {
try (var recorder = new WarcRecorder(tempFileWarc2, new BasicCookieStore());
try (var recorder = new WarcRecorder(tempFileWarc2);
var db = new DomainStateDb(tempFileDb)
) {
new CrawlerRetreiver(httpFetcher, new DomainProber(d -> true), specs, db, recorder).crawlDomain(new DomainLinks(), reference);
@@ -519,7 +518,7 @@ class CrawlerRetreiverTest {
@NotNull
private DomainCrawlFrontier doCrawl(Path tempFileWarc1, CrawlerMain.CrawlSpecRecord specs) {
try (var recorder = new WarcRecorder(tempFileWarc1, new BasicCookieStore());
try (var recorder = new WarcRecorder(tempFileWarc1);
var db = new DomainStateDb(tempFileDb)
) {
var crawler = new CrawlerRetreiver(httpFetcher, new DomainProber(d -> true), specs, db, recorder);

View File

@@ -10,6 +10,7 @@ import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.converting.processor.DomainProcessor;
import nu.marginalia.converting.writer.ConverterBatchWriter;
import nu.marginalia.crawl.fetcher.ContentTags;
import nu.marginalia.crawl.fetcher.DomainCookies;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.functions.searchquery.QueryFactory;
@@ -43,7 +44,6 @@ import nu.marginalia.process.control.FakeProcessHeartbeat;
import nu.marginalia.storage.FileStorageService;
import nu.marginalia.test.IntegrationTestModule;
import nu.marginalia.test.TestUtil;
import org.apache.hc.client5.http.cookie.BasicCookieStore;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -121,11 +121,12 @@ public class IntegrationTest {
public void run() throws Exception {
/** CREATE WARC */
try (WarcRecorder warcRecorder = new WarcRecorder(warcData, new BasicCookieStore())) {
try (WarcRecorder warcRecorder = new WarcRecorder(warcData)) {
warcRecorder.writeWarcinfoHeader("127.0.0.1", new EdgeDomain("www.example.com"),
new HttpFetcherImpl.DomainProbeResult.Ok(new EdgeUrl("https://www.example.com/")));
warcRecorder.writeReferenceCopy(new EdgeUrl("https://www.example.com/"),
new DomainCookies(),
"text/html", 200,
"""
<html>