Mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git, synced 2025-10-05 21:22:39 +02:00

Compare commits: deploy-010...deploy-011 (20 commits)
Commits (SHA1):

d1ec909b36
c67c5bbf42
ecb0e57a1a
8c61f61b46
662a18c933
1c2426a052
34df7441ac
5387e2bd80
0f3b24d0f8
a732095d2a
6607f0112f
4913730de9
1db64f9d56
4dcff14498
426658f64e
2181b22f05
42bd79a609
b91c1e528a
b1130d7a04
8364bcdc97
@@ -43,12 +43,11 @@ subprojects.forEach {it ->
 }

 ext {
-    jvmVersion=23
-    dockerImageBase='container-registry.oracle.com/graalvm/jdk:23'
+    jvmVersion = 24
+    dockerImageBase='container-registry.oracle.com/graalvm/jdk:24'
     dockerImageTag='latest'
     dockerImageRegistry='marginalia'
     jibVersion = '3.4.4'
 }

 idea {
@@ -14,7 +14,7 @@ public class EdgeDomain implements Serializable {
     @Nonnull
     public final String topDomain;

-    public EdgeDomain(String host) {
+    public EdgeDomain(@Nonnull String host) {
         Objects.requireNonNull(host, "domain name must not be null");

         host = host.toLowerCase();
@@ -61,6 +61,10 @@ public class EdgeDomain implements Serializable {
         this.topDomain = topDomain;
     }

+    public static String getTopDomain(String host) {
+        return new EdgeDomain(host).topDomain;
+    }
+
     private boolean looksLikeGovTld(String host) {
         if (host.length() < 8)
             return false;
@@ -116,24 +120,6 @@ public class EdgeDomain implements Serializable {
         return topDomain.substring(0, cutPoint).toLowerCase();
     }

-    public String getLongDomainKey() {
-        StringBuilder ret = new StringBuilder();
-
-        int cutPoint = topDomain.indexOf('.');
-        if (cutPoint < 0) {
-            ret.append(topDomain);
-        } else {
-            ret.append(topDomain, 0, cutPoint);
-        }
-
-        if (!subDomain.isEmpty() && !"www".equals(subDomain)) {
-            ret.append(":");
-            ret.append(subDomain);
-        }
-
-        return ret.toString().toLowerCase();
-    }
-
     /** If possible, try to provide an alias domain,
      * i.e. a domain name that is very likely to link to this one
      * */
@@ -25,7 +25,7 @@ import static org.mockito.Mockito.when;
 class ZkServiceRegistryTest {
     private static final int ZOOKEEPER_PORT = 2181;
     private static final GenericContainer<?> zookeeper =
-            new GenericContainer<>("zookeeper:3.8.0")
+            new GenericContainer<>("zookeeper:3.8")
                     .withExposedPorts(ZOOKEEPER_PORT);

     List<ZkServiceRegistry> registries = new ArrayList<>();
@@ -21,6 +21,10 @@ public class FaviconGrpcService extends FaviconAPIGrpc.FaviconAPIImplBase implem
         this.domainStateDb = domainStateDb;
     }

+    public boolean shouldRegisterService() {
+        return domainStateDb.isAvailable();
+    }
+
     @Override
     public void getFavicon(RpcFaviconRequest request, StreamObserver<RpcFaviconResponse> responseObserver) {
         Optional<DomainStateDb.FaviconRecord> icon = domainStateDb.getIcon(request.getDomain());
@@ -23,16 +23,33 @@ public class SimpleBlockingThreadPool {
     private final Logger logger = LoggerFactory.getLogger(SimpleBlockingThreadPool.class);

     public SimpleBlockingThreadPool(String name, int poolSize, int queueSize) {
+        this(name, poolSize, queueSize, ThreadType.PLATFORM);
+    }
+
+    public SimpleBlockingThreadPool(String name, int poolSize, int queueSize, ThreadType threadType) {
         tasks = new ArrayBlockingQueue<>(queueSize);

         for (int i = 0; i < poolSize; i++) {
-            Thread worker = new Thread(this::worker, name + "[" + i + "]");
-            worker.setDaemon(true);
-            worker.start();
+            Thread.Builder threadBuilder = switch (threadType) {
+                case VIRTUAL -> Thread.ofVirtual();
+                case PLATFORM -> Thread.ofPlatform().daemon(true);
+            };
+
+            Thread worker = threadBuilder
+                    .name(name + "[" + i + "]")
+                    .start(this::worker);

             workers.add(worker);
         }
     }

+    public enum ThreadType {
+        VIRTUAL,
+        PLATFORM
+    }
+
     public void submit(Task task) throws InterruptedException {
         tasks.put(task);
     }
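The switch over ThreadType above is the whole virtual-thread story: the same worker loop runs on either kind of thread. A minimal standalone sketch of the Thread.Builder pattern the new constructor uses (not part of the commit; the property name, worker count, and Runnable are placeholders):

    public class ThreadBuilderSketch {
        public static void main(String[] args) throws InterruptedException {
            // Pick a builder up front, then stamp out identically-named workers from it.
            Thread.Builder builder = Boolean.getBoolean("useVirtualThreads")
                    ? Thread.ofVirtual()                 // virtual threads need no daemon flag
                    : Thread.ofPlatform().daemon(true);  // platform threads must be marked daemon explicitly

            for (int i = 0; i < 4; i++) {
                // name(...) + start(...) creates and launches the worker in one expression
                Thread worker = builder.name("Worker[" + i + "]")
                        .start(() -> System.out.println(Thread.currentThread()));
                worker.join();
            }
        }
    }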
@@ -41,10 +41,7 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.security.Security;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -106,9 +103,18 @@ public class CrawlerMain extends ProcessMainClass {
         this.blacklist = blacklist;
         this.node = processConfiguration.node();

+        SimpleBlockingThreadPool.ThreadType threadType;
+        if (Boolean.getBoolean("crawler.useVirtualThreads")) {
+            threadType = SimpleBlockingThreadPool.ThreadType.VIRTUAL;
+        }
+        else {
+            threadType = SimpleBlockingThreadPool.ThreadType.PLATFORM;
+        }
+
         pool = new SimpleBlockingThreadPool("CrawlerPool",
                 Integer.getInteger("crawler.poolSize", 256),
-                1);
+                1,
+                threadType);

         // Wait for the blacklist to be loaded before starting the crawl
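Both knobs introduced here are plain system properties, so a deployment can trial virtual threads without a code change. A purely illustrative invocation (how the crawler process is actually launched is not shown in this diff, and the jar name is a placeholder):

    java -Dcrawler.useVirtualThreads=true -Dcrawler.poolSize=512 -jar crawler.jar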
@@ -224,10 +230,7 @@ public class CrawlerMain extends ProcessMainClass {

         logger.info("Loaded {} domains", crawlSpecRecords.size());

-        // Shuffle the domains to ensure we get a good mix of domains in each crawl,
-        // so that e.g. the big domains don't get all crawled at once, or we end up
-        // crawling the same server in parallel from different subdomains...
-        Collections.shuffle(crawlSpecRecords);
+        crawlSpecRecords.sort(crawlSpecArrangement(crawlSpecRecords));

         // First a validation run to ensure the file is all good to parse
         if (crawlSpecRecords.isEmpty()) {
@@ -306,6 +309,30 @@ public class CrawlerMain extends ProcessMainClass {
         }
     }

+    /** Create a comparator that sorts the crawl specs in a way that is beneficial for the crawl,
+     * we want to enqueue domains that have common top domains first, but otherwise have a random
+     * order.
+     * <p></p>
+     * Note, we can't use hash codes for randomization as it is not desirable to have the same order
+     * every time the process is restarted (and CrawlSpecRecord is a record, which defines equals and
+     * hashcode based on the fields).
+     * */
+    private Comparator<CrawlSpecRecord> crawlSpecArrangement(List<CrawlSpecRecord> records) {
+        Random r = new Random();
+        Map<String, Integer> topDomainCounts = new HashMap<>(4 + (int) Math.sqrt(records.size()));
+        Map<String, Integer> randomOrder = new HashMap<>(records.size());
+
+        for (var spec : records) {
+            topDomainCounts.merge(EdgeDomain.getTopDomain(spec.domain), 1, Integer::sum);
+            randomOrder.put(spec.domain, r.nextInt());
+        }
+
+        return Comparator.comparing((CrawlSpecRecord spec) -> topDomainCounts.getOrDefault(EdgeDomain.getTopDomain(spec.domain), 0) >= 8)
+                .reversed()
+                .thenComparing(spec -> randomOrder.get(spec.domain))
+                .thenComparing(Record::hashCode); // non-deterministic tie-breaker to
+    }
+
     /** Submit a task for execution if it can be run, returns true if it was submitted
      * or if it can be discarded */
     private boolean trySubmitDeferredTask(CrawlTask task) {
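To make the ordering concrete with hypothetical data: if the spec list holds nine subdomains of example.com plus a handful of one-off domains, all nine example.com entries are enqueued first (their shared top domain clears the >= 8 threshold), and everything else follows in an order that differs per run, since the randomOrder map is rebuilt from a fresh Random each time the process starts.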
@@ -11,6 +11,7 @@ import java.nio.file.Path;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.SQLException;
+import java.time.Duration;
 import java.time.Instant;
 import java.util.Objects;
 import java.util.Optional;
@@ -24,6 +25,17 @@ public class DomainStateDb implements AutoCloseable {

     private final Connection connection;

+    public record CrawlMeta(
+            String domainName,
+            Instant lastFullCrawl,
+            Duration recrawlTime,
+            Duration crawlTime,
+            int recrawlErrors,
+            int crawlChanges,
+            int totalCrawlSize
+    ) {}
+
     public record SummaryRecord(
             String domainName,
             Instant lastUpdated,
@@ -79,11 +91,16 @@ public class DomainStateDb implements AutoCloseable {
             return fs.asPath().resolve("domainstate.db");
         }
         else {
-            throw new SQLException("Could not find crawl data storage");
+            return null;
         }
     }

-    public DomainStateDb(Path filename) throws SQLException {
+    public DomainStateDb(@Nullable Path filename) throws SQLException {
+        if (null == filename) {
+            connection = null;
+            return;
+        }
+
         String sqliteDbString = "jdbc:sqlite:" + filename.toString();
         connection = DriverManager.getConnection(sqliteDbString);
@@ -97,6 +114,17 @@ public class DomainStateDb implements AutoCloseable {
                         feedUrl TEXT
                         )
                     """);
+            stmt.executeUpdate("""
+                    CREATE TABLE IF NOT EXISTS crawl_meta (
+                        domain TEXT PRIMARY KEY,
+                        lastFullCrawlEpochMs LONG NOT NULL,
+                        recrawlTimeMs LONG NOT NULL,
+                        recrawlErrors INTEGER NOT NULL,
+                        crawlTimeMs LONG NOT NULL,
+                        crawlChanges INTEGER NOT NULL,
+                        totalCrawlSize INTEGER NOT NULL
+                    )
+                    """);
             stmt.executeUpdate("""
                     CREATE TABLE IF NOT EXISTS favicon (
                         domain TEXT PRIMARY KEY,
@@ -110,11 +138,18 @@ public class DomainStateDb implements AutoCloseable {

     @Override
     public void close() throws SQLException {
-        connection.close();
+        if (connection != null) {
+            connection.close();
+        }
     }

+    public boolean isAvailable() {
+        return connection != null;
+    }
+
     public void saveIcon(String domain, FaviconRecord faviconRecord) {
+        if (connection == null) throw new IllegalStateException("No connection to domainstate db");
+
         try (var stmt = connection.prepareStatement("""
                 INSERT OR REPLACE INTO favicon (domain, contentType, icon)
                 VALUES(?, ?, ?)
@@ -130,6 +165,9 @@ public class DomainStateDb implements AutoCloseable {
     }

     public Optional<FaviconRecord> getIcon(String domain) {
+        if (connection == null)
+            return Optional.empty();
+
         try (var stmt = connection.prepareStatement("SELECT contentType, icon FROM favicon WHERE DOMAIN = ?")) {
             stmt.setString(1, domain);
             var rs = stmt.executeQuery();
@@ -149,7 +187,29 @@ public class DomainStateDb implements AutoCloseable {
         return Optional.empty();
     }

+    public void save(CrawlMeta crawlMeta) {
+        if (connection == null) throw new IllegalStateException("No connection to domainstate db");
+
+        try (var stmt = connection.prepareStatement("""
+                INSERT OR REPLACE INTO crawl_meta (domain, lastFullCrawlEpochMs, recrawlTimeMs, recrawlErrors, crawlTimeMs, crawlChanges, totalCrawlSize)
+                VALUES (?, ?, ?, ?, ?, ?, ?)
+                """)) {
+            stmt.setString(1, crawlMeta.domainName());
+            stmt.setLong(2, crawlMeta.lastFullCrawl.toEpochMilli());
+            stmt.setLong(3, crawlMeta.recrawlTime.toMillis());
+            stmt.setInt(4, crawlMeta.recrawlErrors);
+            stmt.setLong(5, crawlMeta.crawlTime.toMillis());
+            stmt.setInt(6, crawlMeta.crawlChanges);
+            stmt.setInt(7, crawlMeta.totalCrawlSize);
+            stmt.executeUpdate();
+        } catch (SQLException e) {
+            logger.error("Failed to insert crawl meta record", e);
+        }
+    }
+
     public void save(SummaryRecord record) {
+        if (connection == null) throw new IllegalStateException("No connection to domainstate db");
+
         try (var stmt = connection.prepareStatement("""
                 INSERT OR REPLACE INTO summary (domain, lastUpdatedEpochMs, state, stateDesc, feedUrl)
                 VALUES (?, ?, ?, ?, ?)
@@ -165,7 +225,38 @@ public class DomainStateDb implements AutoCloseable {
         }
     }

-    public Optional<SummaryRecord> get(String domainName) {
+    public Optional<CrawlMeta> getMeta(String domainName) {
+        if (connection == null)
+            return Optional.empty();
+
+        try (var stmt = connection.prepareStatement("""
+                SELECT domain, lastFullCrawlEpochMs, recrawlTimeMs, recrawlErrors, crawlTimeMs, crawlChanges, totalCrawlSize
+                FROM crawl_meta
+                WHERE domain = ?
+                """)) {
+            stmt.setString(1, domainName);
+            var rs = stmt.executeQuery();
+            if (rs.next()) {
+                return Optional.of(new CrawlMeta(
+                        rs.getString("domain"),
+                        Instant.ofEpochMilli(rs.getLong("lastFullCrawlEpochMs")),
+                        Duration.ofMillis(rs.getLong("recrawlTimeMs")),
+                        Duration.ofMillis(rs.getLong("crawlTimeMs")),
+                        rs.getInt("recrawlErrors"),
+                        rs.getInt("crawlChanges"),
+                        rs.getInt("totalCrawlSize")
+                ));
+            }
+        } catch (SQLException ex) {
+            logger.error("Failed to get crawl meta record", ex);
+        }
+        return Optional.empty();
+    }
+
+    public Optional<SummaryRecord> getSummary(String domainName) {
+        if (connection == null)
+            return Optional.empty();
+
         try (var stmt = connection.prepareStatement("""
                 SELECT domain, lastUpdatedEpochMs, state, stateDesc, feedUrl
                 FROM summary
@@ -29,7 +29,9 @@ import java.net.http.HttpResponse;
 import java.net.http.HttpTimeoutException;
 import java.time.Duration;
 import java.util.*;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import java.util.concurrent.Semaphore;
 import java.util.zip.GZIPInputStream;
@@ -55,12 +57,22 @@ public class HttpFetcherImpl implements HttpFetcher {
     private final HttpClient client;

     private HttpClient createClient() {
+        final ExecutorService executorService;
+
+        if (Boolean.getBoolean("crawler.httpclient.useVirtualThreads")) {
+            executorService = Executors.newVirtualThreadPerTaskExecutor();
+        }
+        else {
+            executorService = Executors.newCachedThreadPool();
+        }
+
         return HttpClient.newBuilder()
                 .sslContext(NoSecuritySSL.buildSslContext())
                 .cookieHandler(cookies)
                 .followRedirects(HttpClient.Redirect.NORMAL)
                 .version(HttpClient.Version.HTTP_1_1)
                 .connectTimeout(Duration.ofSeconds(8))
-                .executor(Executors.newCachedThreadPool())
+                .executor(executorService)
                 .build();
     }
@@ -116,7 +128,7 @@ public class HttpFetcherImpl implements HttpFetcher {

         for (int tries = 0;; tries++) {
             try {
-                var rsp = client.send(head, HttpResponse.BodyHandlers.discarding());
+                var rsp = SendLock.wrapSend(client, head, HttpResponse.BodyHandlers.discarding());
                 EdgeUrl rspUri = new EdgeUrl(rsp.uri());

                 if (!Objects.equals(rspUri.domain, url.domain)) {
@@ -153,7 +165,7 @@ public class HttpFetcherImpl implements HttpFetcher {
                 .timeout(requestTimeout)
                 ;

-        var rsp = client.send(headBuilder.build(), HttpResponse.BodyHandlers.discarding());
+        var rsp = SendLock.wrapSend(client, headBuilder.build(), HttpResponse.BodyHandlers.discarding());
         var headers = rsp.headers();

         var contentTypeHeader = headers.firstValue("Content-Type").orElse(null);
@@ -229,21 +241,24 @@ public class HttpFetcherImpl implements HttpFetcher {

         contentTags.paint(getBuilder);

-        HttpFetchResult result = warcRecorder.fetch(client, getBuilder.build());
-
-        if (result instanceof HttpFetchResult.ResultOk ok) {
-            if (ok.statusCode() == 429) {
-                throw new RateLimitException(Objects.requireNonNullElse(ok.header("Retry-After"), "1"));
-            }
-            if (ok.statusCode() == 304) {
-                return new HttpFetchResult.Result304Raw();
-            }
-            if (ok.statusCode() == 200) {
-                return ok;
-            }
-        }
-
-        return result;
+        try (var sl = new SendLock()) {
+            HttpFetchResult result = warcRecorder.fetch(client, getBuilder.build());
+
+            if (result instanceof HttpFetchResult.ResultOk ok) {
+                if (ok.statusCode() == 429) {
+                    throw new RateLimitException(Objects.requireNonNullElse(ok.header("Retry-After"), "1"));
+                }
+                if (ok.statusCode() == 304) {
+                    return new HttpFetchResult.Result304Raw();
+                }
+                if (ok.statusCode() == 200) {
+                    return ok;
+                }
+            }
+
+            return result;
+        }
     }

     @Override
@@ -317,22 +332,28 @@ public class HttpFetcherImpl implements HttpFetcher {
                 .timeout(requestTimeout)
                 .build();

-        var response = client.send(getRequest, HttpResponse.BodyHandlers.ofInputStream());
-        if (response.statusCode() != 200) {
-            return new SitemapResult.SitemapError();
-        }
-
-        try (InputStream inputStream = response.body()) {
-
-            InputStream parserStream;
-            if (sitemapUrl.path.endsWith(".gz")) {
-                parserStream = new GZIPInputStream(inputStream);
-            }
-            else {
-                parserStream = inputStream;
-            }
-
-            Document parsedSitemap = Jsoup.parse(parserStream, "UTF-8", sitemapUrl.toString(), Parser.xmlParser());
+        try (var sl = new SendLock()) {
+            var response = client.send(getRequest, HttpResponse.BodyHandlers.ofInputStream());
+            if (response.statusCode() != 200) {
+                return new SitemapResult.SitemapError();
+            }
+
+            Document parsedSitemap;
+
+            try (InputStream inputStream = response.body()) {
+                InputStream parserStream;
+                if (sitemapUrl.path.endsWith(".gz")) {
+                    parserStream = new GZIPInputStream(inputStream);
+                } else {
+                    parserStream = inputStream;
+                }
+
+                parsedSitemap = Jsoup.parse(parserStream, "UTF-8", sitemapUrl.toString(), Parser.xmlParser());
+            }
+            finally {
+                sl.close();
+            }
+
             if (parsedSitemap.childrenSize() == 0) {
                 return new SitemapResult.SitemapError();
             }
@@ -386,7 +407,7 @@ public class HttpFetcherImpl implements HttpFetcher {
     }

     private Optional<SimpleRobotRules> fetchAndParseRobotsTxt(EdgeUrl url, WarcRecorder recorder) {
-        try {
+        try (var sl = new SendLock()) {
             var getRequest = HttpRequest.newBuilder()
                     .GET()
                     .uri(url.asURI())
@@ -429,5 +450,30 @@ public class HttpFetcherImpl implements HttpFetcher {
             }
         }
     }

 }

+class SendLock implements AutoCloseable {
+
+    private static final Semaphore maxConcurrentRequests = new Semaphore(Integer.getInteger("crawler.maxConcurrentRequests", 512));
+    boolean closed = false;
+
+    public SendLock() {
+        maxConcurrentRequests.acquireUninterruptibly();
+    }
+
+    public static <T> HttpResponse<T> wrapSend(HttpClient client, HttpRequest request, HttpResponse.BodyHandler<T> handler) throws IOException, InterruptedException {
+        try (var lock = new SendLock()) {
+            return client.send(request, handler);
+        }
+    }
+
+    @Override
+    public void close() {
+        if (!closed) {
+            maxConcurrentRequests.release();
+            closed = true;
+        }
+    }
+}
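SendLock is a small RAII-style throttle: one process-wide semaphore bounds concurrent HTTP sends, and try-with-resources guarantees the permit is returned exactly once even when the send throws. A self-contained sketch of the same pattern (class name and permit count are placeholders, not from the commit):

    import java.util.concurrent.Semaphore;

    class Gate implements AutoCloseable {
        private static final Semaphore permits = new Semaphore(2); // placeholder bound

        Gate() {
            permits.acquireUninterruptibly(); // block until a slot frees up
        }

        @Override
        public void close() {
            permits.release(); // runs via try-with-resources, so permits cannot leak
        }

        static void doThrottled(Runnable work) {
            try (var g = new Gate()) { // at most two callers inside at once
                work.run();
            }
        }
    }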
@@ -8,7 +8,10 @@ import java.net.http.HttpHeaders;
 import java.net.http.HttpResponse;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.time.Duration;
+import java.time.Instant;
 import java.util.Map;
+import java.util.concurrent.*;
 import java.util.zip.GZIPInputStream;

 /** Input buffer for temporary storage of a HTTP response
@@ -39,7 +42,7 @@ public abstract class WarcInputBuffer implements AutoCloseable {
      * and suppressed from the headers.
      * If an error occurs, a buffer will be created with no content and an error status.
      */
-    static WarcInputBuffer forResponse(HttpResponse<InputStream> rsp) {
+    static WarcInputBuffer forResponse(HttpResponse<InputStream> rsp, Duration timeLimit) {
         if (rsp == null)
             return new ErrorBuffer();
@@ -51,11 +54,11 @@ public abstract class WarcInputBuffer implements AutoCloseable {

         if (contentEncoding == null && contentLength > 0 && contentLength < 8192) {
             // If the content is small and not compressed, we can just read it into memory
-            return new MemoryBuffer(headers, is, contentLength);
+            return new MemoryBuffer(headers, timeLimit, is, contentLength);
         }
         else {
             // Otherwise, we unpack it into a file and read it from there
-            return new FileBuffer(headers, is);
+            return new FileBuffer(headers, timeLimit, is);
         }
     }
     catch (Exception ex) {
@@ -64,9 +67,16 @@ public abstract class WarcInputBuffer implements AutoCloseable {

     }

+    private static final ExecutorService virtualExecutorService = Executors.newVirtualThreadPerTaskExecutor();
+
+    private Future<Integer> readAsync(InputStream is, byte[] out) {
+        return virtualExecutorService.submit(() -> is.read(out));
+    }
+
     /** Copy an input stream to an output stream, with a maximum size and time limit */
-    protected void copy(InputStream is, OutputStream os) {
-        long startTime = System.currentTimeMillis();
+    protected void copy(InputStream is, OutputStream os, Duration timeLimit) {
+        Instant start = Instant.now();
+        Instant timeout = start.plus(timeLimit);
         long size = 0;

         byte[] buffer = new byte[8192];
@@ -76,7 +86,15 @@ public abstract class WarcInputBuffer implements AutoCloseable {

         while (true) {
             try {
-                int n = is.read(buffer);
+                Duration remaining = Duration.between(Instant.now(), timeout);
+                if (remaining.isNegative()) {
+                    truncationReason = WarcTruncationReason.TIME;
+                    break;
+                }
+
+                Future<Integer> readAsync = readAsync(is, buffer);
+                int n = readAsync.get(remaining.toMillis(), TimeUnit.MILLISECONDS);
+
                 if (n < 0) break;
                 size += n;
                 os.write(buffer, 0, n);
@@ -85,12 +103,11 @@ public abstract class WarcInputBuffer implements AutoCloseable {
                     truncationReason = WarcTruncationReason.LENGTH;
                     break;
                 }
-
-                if (System.currentTimeMillis() - startTime > WarcRecorder.MAX_TIME) {
-                    truncationReason = WarcTruncationReason.TIME;
-                    break;
-                }
-            } catch (IOException e) {
+            } catch (IOException|ExecutionException e) {
                 truncationReason = WarcTruncationReason.UNSPECIFIED;
+            } catch (TimeoutException e) {
+                truncationReason = WarcTruncationReason.TIME;
+            } catch (InterruptedException e) {
+                throw new RuntimeException(e);
             }
         }
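The copy loop now enforces its deadline by handing each blocking read to a virtual thread and bounding the wait with Future.get, rather than checking the clock only after a read returns. A self-contained sketch of that idea (stream contents and the one-second budget are placeholders):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.util.concurrent.*;

    public class TimedReadSketch {
        public static void main(String[] args) throws Exception {
            ExecutorService exec = Executors.newVirtualThreadPerTaskExecutor();
            InputStream is = new ByteArrayInputStream("hello".getBytes()); // placeholder stream
            byte[] buf = new byte[8192];

            Future<Integer> pending = exec.submit(() -> is.read(buf)); // read on a virtual thread
            try {
                int n = pending.get(1000, TimeUnit.MILLISECONDS); // bounded wait, like copy()'s remaining budget
                System.out.println("read " + n + " bytes");
            } catch (TimeoutException e) {
                pending.cancel(true); // deadline blown: treat the body as truncated
            } finally {
                exec.shutdown();
            }
        }
    }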
@@ -123,12 +140,12 @@ class ErrorBuffer extends WarcInputBuffer {

 /** Buffer for when we have the response in memory */
 class MemoryBuffer extends WarcInputBuffer {
     byte[] data;
-    public MemoryBuffer(HttpHeaders headers, InputStream responseStream, int size) {
+    public MemoryBuffer(HttpHeaders headers, Duration timeLimit, InputStream responseStream, int size) {
         super(headers);

         var outputStream = new ByteArrayOutputStream(size);

-        copy(responseStream, outputStream);
+        copy(responseStream, outputStream, timeLimit);

         data = outputStream.toByteArray();
     }
@@ -152,7 +169,7 @@ class MemoryBuffer extends WarcInputBuffer {
 class FileBuffer extends WarcInputBuffer {
     private final Path tempFile;

-    public FileBuffer(HttpHeaders headers, InputStream responseStream) throws IOException {
+    public FileBuffer(HttpHeaders headers, Duration timeLimit, InputStream responseStream) throws IOException {
         super(suppressContentEncoding(headers));

         this.tempFile = Files.createTempFile("rsp", ".html");
@@ -160,7 +177,7 @@ class FileBuffer extends WarcInputBuffer {

         if ("gzip".equalsIgnoreCase(headers.firstValue("Content-Encoding").orElse(""))) {
             try (var out = Files.newOutputStream(tempFile)) {
-                copy(new GZIPInputStream(responseStream), out);
+                copy(new GZIPInputStream(responseStream), out, timeLimit);
             }
             catch (Exception ex) {
                 truncationReason = WarcTruncationReason.UNSPECIFIED;
@@ -168,7 +185,7 @@ class FileBuffer extends WarcInputBuffer {
         }
         else {
             try (var out = Files.newOutputStream(tempFile)) {
-                copy(responseStream, out);
+                copy(responseStream, out, timeLimit);
             }
             catch (Exception ex) {
                 truncationReason = WarcTruncationReason.UNSPECIFIED;
@@ -102,7 +102,7 @@ public class WarcRecorder implements AutoCloseable {
         }

-        try (WarcInputBuffer inputBuffer = WarcInputBuffer.forResponse(response);
+        try (WarcInputBuffer inputBuffer = WarcInputBuffer.forResponse(response, request.timeout().orElseGet(() -> Duration.ofMillis(MAX_TIME)));
              InputStream inputStream = inputBuffer.read())
         {
             if (cookies.hasCookies()) {
@@ -3,6 +3,7 @@ package nu.marginalia.crawl.retreival;
 import nu.marginalia.crawl.fetcher.HttpFetcherImpl;

 import java.time.Duration;
+import java.util.concurrent.ThreadLocalRandom;

 import static java.lang.Math.max;
 import static java.lang.Math.min;
@@ -53,12 +54,13 @@ public class CrawlDelayTimer {
     public void waitFetchDelay(long spentTime) {
         long sleepTime = delayTime;

+        long jitter = ThreadLocalRandom.current().nextLong(0, 150);
         try {
             if (sleepTime >= 1) {
                 if (spentTime > sleepTime)
                     return;

-                Thread.sleep(min(sleepTime - spentTime, 5000));
+                Thread.sleep(min(sleepTime - spentTime, 5000) + jitter);
             } else {
                 // When no crawl delay is specified, lean toward twice the fetch+process time,
                 // within sane limits. This means slower servers get slower crawling, and faster
@@ -71,17 +73,17 @@ public class CrawlDelayTimer {
                 if (spentTime > sleepTime)
                     return;

-                Thread.sleep(sleepTime - spentTime);
+                Thread.sleep(sleepTime - spentTime + jitter);
             }

             if (slowDown) {
                 // Additional delay when the server is signalling it wants slower requests
-                Thread.sleep(DEFAULT_CRAWL_DELAY_MIN_MS);
+                Thread.sleep(DEFAULT_CRAWL_DELAY_MIN_MS + jitter);
             }
         }
         catch (InterruptedException e) {
             Thread.currentThread().interrupt();
-            throw new RuntimeException();
+            throw new RuntimeException("Interrupted", e);
         }
     }
 }
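The effect of the jitter is easy to see with numbers: a crawl delay of 1000 ms now yields a sleep of 1000-1149 ms, so crawler threads that would otherwise wake in lockstep against the same server drift apart over successive requests.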
@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.file.Path;
+import java.time.Duration;
+import java.time.Instant;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.TimeUnit;
@@ -108,15 +110,24 @@ public class CrawlerRetreiver implements AutoCloseable {
                 DomainStateDb.SummaryRecord summaryRecord = sniffRootDocument(probedUrl, delayTimer);
                 domainStateDb.save(summaryRecord);

+                if (Thread.interrupted()) {
+                    // There's a small chance we're interrupted during the sniffing portion
+                    throw new InterruptedException();
+                }
+
+                Instant recrawlStart = Instant.now();
+                CrawlerRevisitor.RecrawlMetadata recrawlMetadata = crawlerRevisitor.recrawl(oldCrawlData, robotsRules, delayTimer);
+                Duration recrawlTime = Duration.between(recrawlStart, Instant.now());
+
                 // Play back the old crawl data (if present) and fetch the documents comparing etags and last-modified
-                if (crawlerRevisitor.recrawl(oldCrawlData, robotsRules, delayTimer) > 0) {
+                if (recrawlMetadata.size() > 0) {
                     // If we have reference data, we will always grow the crawl depth a bit
                     crawlFrontier.increaseDepth(1.5, 2500);
                 }

                 oldCrawlData.close(); // proactively close the crawl data reference here to not hold onto expensive resources

-                yield crawlDomain(probedUrl, robotsRules, delayTimer, domainLinks);
+                yield crawlDomain(probedUrl, robotsRules, delayTimer, domainLinks, recrawlMetadata, recrawlTime);
             }
             case HttpFetcher.DomainProbeResult.Redirect(EdgeDomain domain1) -> {
                 domainStateDb.save(DomainStateDb.SummaryRecord.forError(domain, "Redirect", domain1.toString()));
@@ -138,8 +149,11 @@ public class CrawlerRetreiver implements AutoCloseable {
     private int crawlDomain(EdgeUrl rootUrl,
                             SimpleRobotRules robotsRules,
                             CrawlDelayTimer delayTimer,
-                            DomainLinks domainLinks) {
+                            DomainLinks domainLinks,
+                            CrawlerRevisitor.RecrawlMetadata recrawlMetadata,
+                            Duration recrawlTime) {
+
+        Instant crawlStart = Instant.now();

         // Add external links to the crawl frontier
         crawlFrontier.addAllToQueue(domainLinks.getUrls(rootUrl.proto));
@@ -149,6 +163,8 @@ public class CrawlerRetreiver implements AutoCloseable {
             crawlFrontier.addAllToQueue(fetcher.fetchSitemapUrls(sitemap, delayTimer));
         }

+        int crawlerAdditions = 0;
+
         while (!crawlFrontier.isEmpty()
                 && !crawlFrontier.isCrawlDepthReached()
                 && errorCount < MAX_ERRORS
@@ -180,7 +196,11 @@ public class CrawlerRetreiver implements AutoCloseable {
                 continue;

             try {
-                fetchContentWithReference(top, delayTimer, DocumentWithReference.empty());
+                var result = fetchContentWithReference(top, delayTimer, DocumentWithReference.empty());
+
+                if (result.isOk()) {
+                    crawlerAdditions++;
+                }
             }
             catch (InterruptedException ex) {
                 Thread.currentThread().interrupt();
@@ -188,6 +208,17 @@ public class CrawlerRetreiver implements AutoCloseable {
             }
         }

+        Duration crawlTime = Duration.between(crawlStart, Instant.now());
+        domainStateDb.save(new DomainStateDb.CrawlMeta(
+                domain,
+                Instant.now(),
+                recrawlTime,
+                crawlTime,
+                recrawlMetadata.errors(),
+                crawlerAdditions,
+                recrawlMetadata.size() + crawlerAdditions
+        ));
+
         return crawlFrontier.visitedSize();
     }
@@ -289,6 +320,10 @@ public class CrawlerRetreiver implements AutoCloseable {
         }
         catch (Exception ex) {
             logger.error("Error configuring link filter", ex);
+            if (Thread.interrupted()) {
+                Thread.currentThread().interrupt();
+                return DomainStateDb.SummaryRecord.forError(domain, "Crawler Interrupted", ex.getMessage());
+            }
         }
         finally {
             crawlFrontier.addVisited(rootUrl);
@@ -316,7 +351,7 @@ public class CrawlerRetreiver implements AutoCloseable {
     );

     private Optional<String> guessFeedUrl(CrawlDelayTimer timer) throws InterruptedException {
-        var oldDomainStateRecord = domainStateDb.get(domain);
+        var oldDomainStateRecord = domainStateDb.getSummary(domain);

         // If we are already aware of an old feed URL, then we can just revalidate it
         if (oldDomainStateRecord.isPresent()) {
@@ -31,7 +31,7 @@ public class CrawlerRevisitor {
     }

     /** Performs a re-crawl of old documents, comparing etags and last-modified */
-    public int recrawl(CrawlDataReference oldCrawlData,
+    public RecrawlMetadata recrawl(CrawlDataReference oldCrawlData,
                        SimpleRobotRules robotsRules,
                        CrawlDelayTimer delayTimer)
             throws InterruptedException {
@@ -39,6 +39,7 @@ public class CrawlerRevisitor {
         int retained = 0;
         int errors = 0;
         int skipped = 0;
+        int size = 0;

         for (CrawledDocument doc : oldCrawlData) {
             if (errors > 20) {
@@ -46,6 +47,10 @@ public class CrawlerRevisitor {
                 break;
             }

+            if (Thread.interrupted()) {
+                throw new InterruptedException();
+            }
+
             var urlMaybe = EdgeUrl.parse(doc.url);
             if (urlMaybe.isEmpty())
                 continue;
@@ -78,6 +83,7 @@ public class CrawlerRevisitor {
                 continue;
             }

+            size++;

             double skipProb;
@@ -150,6 +156,8 @@ public class CrawlerRevisitor {
             }
         }

-        return recrawled;
+        return new RecrawlMetadata(size, errors, skipped);
     }

+    public record RecrawlMetadata(int size, int errors, int skipped) {}
 }
@@ -8,6 +8,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.sql.SQLException;
+import java.time.Duration;
 import java.time.Instant;

 import static org.junit.jupiter.api.Assertions.*;
@@ -47,8 +48,8 @@ class DomainStateDbTest {
             db.save(allFields);
             db.save(minFields);

-            assertEquals(allFields, db.get("all.marginalia.nu").orElseThrow());
-            assertEquals(minFields, db.get("min.marginalia.nu").orElseThrow());
+            assertEquals(allFields, db.getSummary("all.marginalia.nu").orElseThrow());
+            assertEquals(minFields, db.getSummary("min.marginalia.nu").orElseThrow());

             var updatedAllFields = new DomainStateDb.SummaryRecord(
                     "all.marginalia.nu",
@@ -59,7 +60,19 @@ class DomainStateDbTest {
             );

             db.save(updatedAllFields);
-            assertEquals(updatedAllFields, db.get("all.marginalia.nu").orElseThrow());
+            assertEquals(updatedAllFields, db.getSummary("all.marginalia.nu").orElseThrow());
         }
     }

+    @Test
+    public void testMetadata() throws SQLException {
+        try (var db = new DomainStateDb(tempFile)) {
+            var original = new DomainStateDb.CrawlMeta("example.com", Instant.ofEpochMilli(12345), Duration.ofMillis(30), Duration.ofMillis(300), 1, 2, 3);
+            db.save(original);
+
+            var maybeMeta = db.getMeta("example.com");
+            assertTrue(maybeMeta.isPresent());
+            assertEquals(original, maybeMeta.get());
+        }
+    }
@@ -0,0 +1,149 @@
package nu.marginalia.crawl.retreival.fetcher;

import com.sun.net.httpserver.HttpServer;
import nu.marginalia.crawl.fetcher.Cookies;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import org.junit.jupiter.api.*;
import org.netpreserve.jwarc.WarcReader;
import org.netpreserve.jwarc.WarcRequest;
import org.netpreserve.jwarc.WarcResponse;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.NoSuchAlgorithmException;
import java.time.Duration;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

@Tag("slow")
class WarcRecorderFakeServerTest {
    static HttpServer server;

    @BeforeAll
    public static void setUpAll() throws IOException {
        server = HttpServer.create(new InetSocketAddress("127.0.0.1", 14510), 10);
        server.createContext("/fast", exchange -> {
            exchange.getResponseHeaders().add("Content-Type", "text/html");
            exchange.sendResponseHeaders(200, "<html><body>hello</body></html>".length());

            try (var os = exchange.getResponseBody()) {
                os.write("<html><body>hello</body></html>".getBytes());
                os.flush();
            }
            exchange.close();
        });

        server.createContext("/slow", exchange -> {
            exchange.getResponseHeaders().add("Content-Type", "text/html");
            exchange.sendResponseHeaders(200, "<html><body>hello</body></html>:D".length());

            try (var os = exchange.getResponseBody()) {
                os.write("<html><body>hello</body></html>".getBytes());
                os.flush();
                try {
                    TimeUnit.SECONDS.sleep(10);
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
                os.write(":D".getBytes());
                os.flush();
            }
            exchange.close();
        });

        server.start();
    }

    @AfterAll
    public static void tearDownAll() {
        server.stop(0);
    }

    Path fileNameWarc;
    Path fileNameParquet;
    WarcRecorder client;

    HttpClient httpClient;

    @BeforeEach
    public void setUp() throws Exception {
        httpClient = HttpClient.newBuilder().build();

        fileNameWarc = Files.createTempFile("test", ".warc");
        fileNameParquet = Files.createTempFile("test", ".parquet");

        client = new WarcRecorder(fileNameWarc, new Cookies());
    }

    @AfterEach
    public void tearDown() throws Exception {
        client.close();
        Files.delete(fileNameWarc);
    }

    @Test
    void fetchFast() throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException {
        client.fetch(httpClient,
                HttpRequest.newBuilder()
                        .uri(new java.net.URI("http://localhost:14510/fast"))
                        .timeout(Duration.ofSeconds(1))
                        .header("User-agent", "test.marginalia.nu")
                        .header("Accept-Encoding", "gzip")
                        .GET().build()
        );

        Map<String, String> sampleData = new HashMap<>();
        try (var warcReader = new WarcReader(fileNameWarc)) {
            warcReader.forEach(record -> {
                if (record instanceof WarcRequest req) {
                    sampleData.put(record.type(), req.target());
                }
                if (record instanceof WarcResponse rsp) {
                    sampleData.put(record.type(), rsp.target());
                }
            });
        }

        System.out.println(sampleData);
    }

    @Test
    void fetchSlow() throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException {
        Instant start = Instant.now();
        client.fetch(httpClient,
                HttpRequest.newBuilder()
                        .uri(new java.net.URI("http://localhost:14510/slow"))
                        .timeout(Duration.ofSeconds(1))
                        .header("User-agent", "test.marginalia.nu")
                        .header("Accept-Encoding", "gzip")
                        .GET().build()
        );
        Instant end = Instant.now();

        Map<String, String> sampleData = new HashMap<>();
        try (var warcReader = new WarcReader(fileNameWarc)) {
            warcReader.forEach(record -> {
                if (record instanceof WarcRequest req) {
                    sampleData.put(record.type(), req.target());
                }
                if (record instanceof WarcResponse rsp) {
                    sampleData.put(record.type(), rsp.target());
                    System.out.println(rsp.target());
                }
            });
        }

        System.out.println(sampleData);

        // Timeout is set to 1 second, but the server takes 10 seconds to finish responding,
        // so we expect the request to take 1s and change before it times out.
        Assertions.assertTrue(Duration.between(start, end).toMillis() < 2000, "Request should take less than 2 seconds");
    }

}
@@ -82,17 +82,17 @@ public class SearchService extends JoobyService {
        jooby.get("/site/https://*", this::handleSiteUrlRedirect);
        jooby.get("/site/http://*", this::handleSiteUrlRedirect);

+        String emptySvg = "<svg xmlns=\"http://www.w3.org/2000/svg\"></svg>";
        jooby.get("/site/{domain}/favicon", ctx -> {
            String domain = ctx.path("domain").value();
-            logger.info("Finding icon for domain {}", domain);
-            domainQueries.getDomainId(new EdgeDomain(domain));
            try {
+                DbDomainQueries.DomainIdWithNode domainIdWithNode = domainQueries.getDomainIdWithNode(new EdgeDomain(domain));
                var faviconMaybe = faviconClient.getFavicon(domain, domainIdWithNode.nodeAffinity());

                if (faviconMaybe.isEmpty()) {
                    ctx.setResponseCode(404);
-                    return "";
+                    ctx.setResponseType(MediaType.valueOf("image/svg+xml"));
+                    return emptySvg;
                } else {
                    var favicon = faviconMaybe.get();
@@ -102,7 +102,8 @@ public class SearchService extends JoobyService {
                }
            }
            catch (NoSuchElementException ex) {
                ctx.setResponseCode(404);
+                ctx.setResponseType(MediaType.valueOf("image/svg+xml"));
+                return emptySvg;
            }
            return "";
        });
@@ -10,7 +10,7 @@

@template.part.head(title = "Marginalia Search - Explore")

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans ">
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans ">

@template.part.navbar(navbar = navbar)
@@ -23,7 +23,7 @@
    </header>

    <div class="max-w-[1400px] mx-auto flex flex-col gap-1 place-items-center">
-        <div class="border dark:border-gray-600 bg-white dark:bg-gray-800 dark:text-gray-100 my-4 p-3 rounded overflow-hidden flex flex-col space-y-4">
+        <div class="border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 dark:text-gray-100 my-4 p-3 rounded overflow-hidden flex flex-col space-y-4">
        @if (results.hasFocusDomain())
            <div class="flex space-x-1">
                <span>Showing websites similar to <a class="font-mono text-liteblue dark:text-blue-200" href="/site/${results.focusDomain()}"><i class="fas fa-globe"></i> <span class="underline">${results.focusDomain()}</span></a></span>
@@ -36,7 +36,7 @@
        </div>
        <div class="grid-cols-1 gap-4 sm:grid sm:grid-cols-1 md:grid-cols-3 xl:grid-cols-4 mx-auto sm:p-4">
        @for (BrowseResult result : results.results())
-            <div class="bg-white border dark:border-gray-600 dark:bg-gray-800 rounded overflow-hidden">
+            <div class="bg-white border border-gray-300 dark:border-gray-600 dark:bg-gray-800 rounded overflow-hidden">
                <div class="bg-margeblue text-white p-2 flex space-x-4 text-sm">
                    <span class="break-words">${result.displayDomain()}</span>
                    <div class="grow"></div>
@@ -9,6 +9,7 @@
            nicotine: '#f8f8ee',
            margeblue: '#3e5f6f',
            liteblue: '#0066cc',
+            bgblue: '#e5e9eb',
        },
        screens: {
            'coarsepointer': {
@@ -15,7 +15,7 @@

@template.part.head(title = "Marginalia Search - " + parameters.query())

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >
@template.part.navbar(navbar = navbar)

<div>
@@ -9,7 +9,7 @@

@template.part.head(title = "Marginalia Search - Error")

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >

@template.part.navbar(navbar = navbar)
@@ -11,7 +11,7 @@

@template.part.head(title = "Marginalia Search - " + results.getQuery())

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >
@template.part.navbar(navbar = navbar)

<div>
@@ -26,15 +26,15 @@
    It operates a bit like a clock, starting at the top and working its way around clockwise.</p>

    <div class="flex gap-4 place-items-middle">
-        @template.serp.part.matchogram(mask = 90)
+        @template.serp.part.matchogram(mask = 90, domain = "example.com")
        <div>This is by the beginning</div>
    </div>
    <div class="flex gap-4 place-items-middle">
-        @template.serp.part.matchogram(mask = 90L<<26)
+        @template.serp.part.matchogram(mask = 90L<<26, domain = "example.com")
        <div>This is in the middle</div>
    </div>
    <div class="flex gap-4 place-items-middle">
-        @template.serp.part.matchogram(mask = 5L<<48)
+        @template.serp.part.matchogram(mask = 5L<<48, domain = "example.com")
        <div>This is toward the end</div>
    </div>
@@ -1,11 +1,13 @@
@import java.util.stream.IntStream

@param long mask
+@param String domain

-<svg width="40" height="40">
+<svg width="40" height="40"
+     style="background-image: url('/site/${domain}/favicon'); background-repeat: no-repeat; background-size: 16px 16px; background-position: center; ">
    <circle
-        cx="18"
-        cy="18"
+        cx="20"
+        cy="20"
        r="16"
        fill="none"
        stroke="#eee"
@@ -13,10 +15,10 @@
    />
    @for (int bit : IntStream.range(0, 56).filter(bit -> (mask & (1L << bit)) != 0).toArray())
        <line
-            x1="${18 + 15*Math.sin(2 * Math.PI * bit / 56.)}"
-            y1="${18 - 15*Math.cos(2 * Math.PI * bit / 56.)}"
-            x2="${18 + 17*Math.sin(2 * Math.PI * bit / 56.)}"
-            y2="${18 - 17*Math.cos(2 * Math.PI * bit / 56.)}"
+            x1="${20 + 15*Math.sin(2 * Math.PI * bit / 56.)}"
+            y1="${20 - 15*Math.cos(2 * Math.PI * bit / 56.)}"
+            x2="${20 + 17*Math.sin(2 * Math.PI * bit / 56.)}"
+            y2="${20 - 17*Math.cos(2 * Math.PI * bit / 56.)}"
            stroke="#444"
            stroke-width="2"
        />
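For orientation, each tick mark sits at angle 2*pi*bit/56 measured clockwise from 12 o'clock, with endpoints at (cx + r*sin(angle), cy - r*cos(angle)) for r = 15 and r = 17. Moving the centre from (18, 18) to (20, 20) recentres the dial in the 40x40 viewport so the 16 px favicon now drawn behind it lines up; for example, bit 14 lands at 90 degrees, the 3 o'clock position.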
@@ -12,7 +12,7 @@
    <div class="flex flex-col grow" >
        <div class="flex flex-row space-x-2 place-items-center">
            <div class="flex-0" title="Match density">
-                @template.serp.part.matchogram(mask = result.first.positionsMask)
+                @template.serp.part.matchogram(mask = result.first.positionsMask, domain=result.getFirst().url.domain.toString())
            </div>
            <div class="flex grow justify-between items-start">
                <div class="flex-1">
@@ -13,7 +13,7 @@
@for (List<SearchFilters.Filter> filterGroup : filters.getFilterGroups())
@for (SearchFilters.Filter filter : filterGroup)
    <label class="flex items-center">
-        <button title="${filter.displayName}" onclick="document.location='$unsafe{filter.url}'" class="flex-1 py-2 pl-2 rounded flex space-x-2 dark:has-[:checked]:bg-gray-950 has-[:checked]:bg-gray-100 has-[:checked]:text-slate-900 dark:has-[:checked]:text-slate-100 hover:bg-gray-50 dark:hover:bg-gray-950 bg-white dark:bg-gray-900 dark:border dark:border-gray-600 text-margeblue dark:text-slate-200 outline-1 active:outline">
+        <button title="${filter.displayName}" onclick="document.location='$unsafe{filter.url}'" class="flex-1 py-2 pl-2 rounded flex space-x-2 dark:has-[:checked]:bg-gray-950 has-[:checked]:bg-gray-300 has-[:checked]:text-slate-900 dark:has-[:checked]:text-slate-100 hover:bg-gray-50 dark:hover:bg-gray-950 bg-white dark:bg-gray-900 dark:border dark:border-gray-600 text-margeblue dark:text-slate-200 outline-1 active:outline">
        @if (filter.current)
            <input type="checkbox" checked class="sr-only" aria-checked="true" />
        @else
@@ -38,7 +38,7 @@
    <div class="space-y-2">
    @for (SearchFilters.SearchOption option : filters.searchOptions())
        <label class="flex items-center">
-            <button title="${option.name()}" onclick="document.location='$unsafe{option.getUrl()}'" class="flex-1 py-2 pl-2 rounded flex space-x-2 dark:has-[:checked]:bg-gray-950 has-[:checked]:bg-gray-100 has-[:checked]:text-slate-900 dark:has-[:checked]:text-slate-100 hover:bg-gray-50 dark:hover:bg-gray-950 bg-white dark:bg-gray-900 dark:border dark:border-gray-600 text-margeblue dark:text-slate-200 outline-1 active:outline">
+            <button title="${option.name()}" onclick="document.location='$unsafe{option.getUrl()}'" class="flex-1 py-2 pl-2 rounded flex space-x-2 dark:has-[:checked]:bg-gray-950 has-[:checked]:bg-gray-300 has-[:checked]:text-slate-900 dark:has-[:checked]:text-slate-100 hover:bg-gray-50 dark:hover:bg-gray-950 bg-white dark:bg-gray-900 dark:border dark:border-gray-600 text-margeblue dark:text-slate-200 outline-1 active:outline">
        @if (option.isSet())
            <input type="checkbox" checked class="sr-only" aria-checked="true" />
        @else
@@ -15,7 +15,7 @@

@template.part.head(title = "Marginalia Search", allowIndexing = true)

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >

@template.part.navbar(navbar = navbar)
@@ -32,18 +32,14 @@

@if (model.news().isEmpty())
    <div class="max-w-7xl mx-auto flex flex-col space-y-4 fill-w">
-        <div class="border dark:border-gray-600 dark:bg-gray-800 bg-white rounded p-2 m-4 ">
+        <div class="border border-gray-300 border-gray-100 dark:border-gray-600 dark:bg-gray-800 bg-white rounded p-2 m-4 ">
            <div class="text-slate-700 dark:text-white text-sm p-4">
                <div class="fas fa-gift mr-1 text-margeblue dark:text-slate-200"></div>
                This is the new design and home of Marginalia Search.
                You can read about what this entails <a href="https://about.marginalia-search.com/article/redesign/" class="underline text-liteblue dark:text-blue-200">here</a>.
-                <p class="my-4"></p>
-                The old version of Marginalia Search remains available at
-                <a href="https://old-search.marginalia.nu/" class="underline text-liteblue dark:text-blue-200">https://old-search.marginalia.nu/</a>.
+                The old version of Marginalia Search remains available
+                <a href="https://old-search.marginalia.nu/" class="underline text-liteblue dark:text-blue-200">here</a>.
            </div>
        </div>
        <div class="mx-auto flex flex-col sm:flex-row my-4 sm:space-x-2 space-y-2 sm:space-y-0 w-full md:w-auto px-2">
-            <div class="flex flex-col border dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3">
+            <div class="flex flex-col border border-gray-300 dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3">
                <div><i class="fas fa-sailboat mx-2 text-margeblue dark:text-slate-200"></i>Explore the Web</div>
                <ul class="list-disc ml-6 text-slate-700 dark:text-white text-xs leading-5">
                    <li>Prioritizes non-commercial content</li>
@@ -52,7 +48,7 @@
                </ul>
            </div>

-            <div class="flex flex-col border dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3 ">
+            <div class="flex flex-col border border-gray-300 dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3 ">
                <div><i class="fas fa-hand-holding-hand mx-2 text-margeblue dark:text-slate-200"></i>Open Source</div>
                <ul class="list-disc ml-6 text-slate-700 dark:text-white text-xs leading-5">
                    <li>Custom index and crawler software</li>
@@ -65,7 +61,7 @@
                </div>
            </div>

-            <div class="flex flex-col border dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3 ">
+            <div class="flex flex-col border border-gray-300 dark:border-gray-600 rounded overflow-hidden dark:bg-gray-800 bg-white p-6 space-y-3 ">
                <div><i class="fas fa-lock mx-2 text-margeblue dark:text-slate-200"></i> Privacy by default</div>
                <ul class="list-disc ml-6 text-slate-700 dark:text-white text-xs leading-5">
                    <li>Filter out tracking and adtech</li>
@@ -13,7 +13,7 @@

@template.part.head(title = "Marginalia Search - " + parameters.query())

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >
@template.part.navbar(navbar = navbar)

<div>
@@ -11,7 +11,7 @@

@template.part.head(title = "Marginalia Search - " + model.domainA() + "/" + model.domainB())

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >

@template.part.navbar(navbar = navbar)
@@ -9,7 +9,7 @@

@template.part.head(title = "Marginalia Search - " + model.domain())

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >

@template.part.navbar(navbar = navbar)
@@ -7,7 +7,7 @@

@if (!list.isEmpty())

-    <div class="bg-white dark:bg-gray-800 shadow-sm rounded overflow-hidden border dark:border-gray-600">
+    <div class="bg-white dark:bg-gray-800 shadow-sm rounded overflow-hidden border border-gray-300 dark:border-gray-600">
        <div class="px-4 py-2 bg-margeblue text-white border-b border-gray-200 dark:border-gray-600 flex place-items-baseline">
            <h2 class="text-md">${title}</h2>
            <div class="grow"></div>
@@ -9,11 +9,11 @@

@template.part.head(title = "Marginalia Search - Site Viewer")

-<body class="min-h-screen bg-slate-100 dark:bg-gray-900 dark:text-white font-sans " >
+<body class="min-h-screen bg-bgblue dark:bg-gray-900 dark:text-white font-sans " >

@template.part.navbar(navbar = navbar)

-<header class="border-b border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 shadow-md">
+<header class="border-b border-gray-300 border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-800 shadow-md">
    <div class="max-w-[1400px] mx-auto px-4 py-4">
        <h1 class="text-base md:text-xl mr-2 md:mr-8 font-serif">View Site Information</h1>
    </div>
@@ -22,7 +22,7 @@
<div class="max-w-[1000px] mx-auto flex gap-4 flex-col md:flex-row place-items-center md:place-items-start p-4">

-    <div class="border dark:border-gray-600 rounded md:my-4 overflow-hidden bg-white dark:bg-gray-800 flex flex-col space-y-2 flex-1">
+    <div class="border border-gray-300 dark:border-gray-600 rounded md:my-4 overflow-hidden bg-white dark:bg-gray-800 flex flex-col space-y-2 flex-1">
        <div class="bg-margeblue text-white p-2 text-sm mb-2">View Site Information</div>

        <p class="mx-4">This utility lets you explore what the search engine knows about the web,
@@ -45,7 +45,7 @@
    </div>

    @if (!model.domains().isEmpty())
-        <div class="border dark:border-gray-600 rounded md:my-4 overflow-hidden w-full md:w-auto">
+        <div class="border border-gray-300 dark:border-gray-600 rounded md:my-4 overflow-hidden w-full md:w-auto">
            <div class="bg-margeblue text-white p-2 text-sm">Recently Discovered Domains</div>
@@ -8,17 +8,17 @@
<div class="flex flex-col space-y-4 my-4 w-full">

    @if (backlinks.results().isEmpty())
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm ">
+        <div class="border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm ">
            The search engine isn't aware of any backlinks to ${backlinks.domain()}!
        </div>
    @else
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
+        <div class="border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
            Showing documents linking to ${backlinks.domain()}
        </div>
    @endif

    @for (GroupedUrlDetails group : backlinks.results())
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden mx-4">
+        <div class="border dark:border-gray-600 border-gray-300 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden mx-4">
            <div class="flex space-x-2 flex-row place-items-baseline bg-margeblue text-white p-2 text-md">
                <span class="fas fa-globe"></span>
                <a href="/site/${group.domain().toString()}">${group.domain().toString()}</a>
@@ -9,17 +9,17 @@
<div class="flex flex-col space-y-4 my-4">

    @if (docs.results().isEmpty())
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
+        <div class="border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
            The search engine doesn't index any documents from ${docs.domain()}
        </div>
    @else
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
+        <div class="border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden p-4 mx-4 text-gray-800 text-sm">
            Showing documents from ${docs.domain()}
        </div>
    @endif

    @for (UrlDetails details : docs.results())
-        <div class="border dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden mx-4">
+        <div class="border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 dark:text-white flex flex-col overflow-hidden mx-4">
            <div class="flex grow justify-between items-start p-4">
                <div class="flex-1">
                    <h2 class="text-xl text-gray-800 dark:text-white font-serif mr-4">
@@ -8,7 +8,7 @@
<!-- Main content -->

<div class="flex-1 p-4 space-y-4 mx-auto w-full md:w-auto">
-    <div class="flex border dark:border-gray-600 rounded bg-white dark:bg-gray-800 flex-col space-y-4 pb-4 overflow-hidden md:max-w-lg" >
+    <div class="flex border border-gray-300 dark:border-gray-600 rounded bg-white dark:bg-gray-800 flex-col space-y-4 pb-4 overflow-hidden md:max-w-lg" >
        <div class="flex place-items-center space-x-2 p-2 text-md border-b dark:border-gray-600 bg-margeblue text-white">
            <img src="/site/${siteInfo.domain()}/favicon" style="width: 16px; height: 16px; vertical-align: center">
            <span>${siteInfo.domain()}</span>
@@ -4,7 +4,7 @@
@param ReportDomain reportDomain

<div class="flex-col mx-auto">
-    <div class="max-w-2xl mx-auto bg-white dark:bg-gray-800 border dark:border-gray-600 rounded overflow-auto shadow-sm my-4 space-y-4 w-full">
+    <div class="max-w-2xl mx-auto bg-white dark:bg-gray-800 border border-gray-300 dark:border-gray-600 rounded overflow-auto shadow-sm my-4 space-y-4 w-full">
        <div class="px-4 py-2 bg-margeblue text-white border-b border-gray-200 dark:border-gray-800">
            <h2 class="text-md">Report Domain Issue</h2>
        </div>
@@ -9,6 +9,7 @@ module.exports = {
            nicotine: '#f8f8ee',
            margeblue: '#3e5f6f',
            liteblue: '#0066cc',
+            bgblue: '#e5e9eb',
        },
        screens: {
            'coarsepointer': {