mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git synced 2025-10-06 17:32:39 +02:00

Compare commits


16 Commits

Author SHA1 Message Date
Viktor Lofgren
626cab5fab (favicon) Add favicon to site overview 2025-03-21 14:15:23 +01:00
Viktor Lofgren
cfd4712191 (favicon) Add capability for fetching favicons 2025-03-21 13:38:58 +01:00
Viktor Lofgren
9f18ced73d (crawler) Improve deferred task behavior 2025-03-18 12:54:18 +01:00
Viktor Lofgren
18e91269ab (crawler) Improve deferred task behavior 2025-03-18 12:25:22 +01:00
Viktor Lofgren
e315ca5758 (search) Change icon for small web filter
The previous icon was of an irregular size and shifted the layout in an unaesthetic way.
2025-03-17 12:07:34 +01:00
Viktor Lofgren
3ceea17c1d (search) Adjustments to device detection in CSS
Use pointer:fine media query to better distinguish between mobile devices and PCs with a window in portrait orientation.

With this, we never show the mobile filtering functionality on desktop, and never show the touch-inaccessible minimized sidebar on mobile.
2025-03-17 12:04:34 +01:00
Viktor Lofgren
b34527c1a3 (search) Add small web filter for new UI 2025-03-17 11:39:19 +01:00
Viktor Lofgren
185bf28fca (crawler) Correct issue leading to parquet files not being correctly preconverted
Path.endsWith("str") != String.endsWith(".str")
2025-03-10 13:48:12 +01:00
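
For context on the fix in CrawlerMain further down: java.nio.file.Path.endsWith() compares whole path components, not character suffixes, so it can never match a bare file extension. A minimal illustrative sketch:

    import java.nio.file.Path;

    class EndsWithDemo {
        public static void main(String[] args) {
            Path p = Path.of("/crawl-data/example.parquet");
            // Path.endsWith() matches trailing *path components*, so this is false:
            System.out.println(p.endsWith(".parquet"));            // false
            // String.endsWith() matches character suffixes, so this is true:
            System.out.println(p.toString().endsWith(".parquet")); // true
        }
    }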
Viktor Lofgren
78cc25584a (crawler) Add error logging when entering bad path for historical crawl data 2025-03-10 13:38:40 +01:00
Viktor Lofgren
62ba30bacf (common) Log info about metrics server 2025-03-10 13:12:39 +01:00
Viktor Lofgren
3bb84eb206 (common) Log info about metrics server 2025-03-10 13:03:48 +01:00
Viktor Lofgren
be7d13ccce (crawler) Correct task execution logic in crawler
The old behavior would flag domains as pending too soon, leading to them being omitted from execution if they were not immediately available to run.
2025-03-09 13:47:51 +01:00
Viktor Lofgren
8c088a7c0b (crawler) Remove custom thread factory
This was causing issues, and not really providing much benefit.
2025-03-09 11:50:52 +01:00
Viktor Lofgren
ea9a642b9b (crawler) More effective task scheduling in the crawler
This should hopefully allow more threads to be busy
2025-03-09 11:44:59 +01:00
Viktor Lofgren
27f528af6a (search) Fix "Remove Javascript" toggle
A bug was introduced at some point where the special keyword for filtering on javascript was changed to special:scripts, from js:true/js:false.

Solves issue #155
2025-02-28 12:03:04 +01:00
Viktor Lofgren
20ca41ec95 (processed model) Use String columns instead of Txt columns for SlopDocumentRecord
It's very likely TxtStringColumn is the culprit of the bug seen in https://github.com/MarginaliaSearch/MarginaliaSearch/issues/154 where the wrong URL was shown for a search result.
2025-02-24 11:41:51 +01:00
27 changed files with 402 additions and 34 deletions

View File

@@ -22,6 +22,7 @@ public class DbDomainQueries {
     private static final Logger logger = LoggerFactory.getLogger(DbDomainQueries.class);
     private final Cache<EdgeDomain, Integer> domainIdCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+    private final Cache<EdgeDomain, DomainIdWithNode> domainWithNodeCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
     private final Cache<Integer, EdgeDomain> domainNameCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
     private final Cache<String, List<DomainWithNode>> siblingsCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
@@ -59,6 +60,34 @@ public class DbDomainQueries {
         }
     }
+    public DomainIdWithNode getDomainIdWithNode(EdgeDomain domain) throws NoSuchElementException {
+        try {
+            return domainWithNodeCache.get(domain, () -> {
+                try (var connection = dataSource.getConnection();
+                     var stmt = connection.prepareStatement("SELECT ID, NODE_AFFINITY FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+                    stmt.setString(1, domain.toString());
+                    var rsp = stmt.executeQuery();
+                    if (rsp.next()) {
+                        return new DomainIdWithNode(rsp.getInt(1), rsp.getInt(2));
+                    }
+                }
+                catch (SQLException ex) {
+                    throw new RuntimeException(ex);
+                }
+                throw new NoSuchElementException();
+            });
+        }
+        catch (UncheckedExecutionException ex) {
+            throw new NoSuchElementException();
+        }
+        catch (ExecutionException ex) {
+            throw new RuntimeException(ex.getCause());
+        }
+    }
+
     public OptionalInt tryGetDomainId(EdgeDomain domain) {
         Integer maybeId = domainIdCache.getIfPresent(domain);
@@ -145,4 +174,6 @@ public class DbDomainQueries {
         return nodeAffinity > 0;
         }
     }
+
+    public record DomainIdWithNode (int domainId, int nodeAffinity) { }
 }
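
The getDomainIdWithNode() lookup added above is what lets callers route per-domain requests (such as the favicon route in SearchService further down) to the index node holding that domain's data. A minimal, hypothetical usage sketch:

    // Hypothetical caller; domainQueries is assumed to be an injected DbDomainQueries instance.
    try {
        DbDomainQueries.DomainIdWithNode result =
                domainQueries.getDomainIdWithNode(new EdgeDomain("www.example.com"));
        int node = result.nodeAffinity();   // partition that holds this domain's crawl data
        // ... dispatch the per-node request here ...
    }
    catch (NoSuchElementException ex) {
        // the domain is not known to the database
    }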

View File

@@ -121,6 +121,7 @@ public class ServiceConfigurationModule extends AbstractModule {
         while (nets.hasMoreElements()) {
             NetworkInterface netif = nets.nextElement();
+            logger.info("Considering network interface {}: Up? {}, Loopback? {}", netif.getDisplayName(), netif.isUp(), netif.isLoopback());
             if (!netif.isUp() || netif.isLoopback()) {
                 continue;
             }
@@ -128,6 +129,7 @@ public class ServiceConfigurationModule extends AbstractModule {
             Enumeration<InetAddress> inetAddresses = netif.getInetAddresses();
             while (inetAddresses.hasMoreElements()) {
                 InetAddress addr = inetAddresses.nextElement();
+                logger.info("Considering address {}: SiteLocal? {}, Loopback? {}", addr.getHostAddress(), addr.isSiteLocalAddress(), addr.isLoopbackAddress());
                 if (addr.isSiteLocalAddress() && !addr.isLoopbackAddress()) {
                     return addr.getHostAddress();
                 }

View File

@@ -13,7 +13,7 @@ import java.net.InetSocketAddress;
 public class MetricsServer {
-    private static Logger logger = LoggerFactory.getLogger(MetricsServer.class);
+    private static final Logger logger = LoggerFactory.getLogger(MetricsServer.class);
     @Inject
     public MetricsServer(ServiceConfiguration configuration) {
@@ -30,6 +30,8 @@ public class MetricsServer {
             context.addServlet(new ServletHolder(new MetricsServlet()), "/metrics");
+            logger.info("MetricsServer listening on {}:{}", configuration.bindAddress(), configuration.metricsPort());
             server.start();
         }
         catch (Exception|NoSuchMethodError ex) {

View File

@@ -0,0 +1,47 @@
plugins {
id 'java'
id "com.google.protobuf" version "0.9.4"
id 'jvm-test-suite'
}
java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(rootProject.ext.jvmVersion))
}
}
jar.archiveBaseName = 'favicon-api'
apply from: "$rootProject.projectDir/protobuf.gradle"
apply from: "$rootProject.projectDir/srcsets.gradle"
dependencies {
implementation project(':code:common:model')
implementation project(':code:common:config')
implementation project(':code:common:service')
implementation libs.bundles.slf4j
implementation libs.prometheus
implementation libs.notnull
implementation libs.guava
implementation dependencies.create(libs.guice.get()) {
exclude group: 'com.google.guava'
}
implementation libs.gson
implementation libs.bundles.protobuf
implementation libs.guava
libs.bundles.grpc.get().each {
implementation dependencies.create(it) {
exclude group: 'com.google.guava'
}
}
testImplementation libs.bundles.slf4j.test
testImplementation libs.bundles.junit
testImplementation libs.mockito
}

View File

@@ -0,0 +1,39 @@
package nu.marginalia.api.favicon;
import com.google.inject.Inject;
import nu.marginalia.service.client.GrpcChannelPoolFactory;
import nu.marginalia.service.client.GrpcMultiNodeChannelPool;
import nu.marginalia.service.discovery.property.ServiceKey;
import nu.marginalia.service.discovery.property.ServicePartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Optional;
public class FaviconClient {
private static final Logger logger = LoggerFactory.getLogger(FaviconClient.class);
private final GrpcMultiNodeChannelPool<FaviconAPIGrpc.FaviconAPIBlockingStub> channelPool;
@Inject
public FaviconClient(GrpcChannelPoolFactory factory) {
this.channelPool = factory.createMulti(
ServiceKey.forGrpcApi(FaviconAPIGrpc.class, ServicePartition.multi()),
FaviconAPIGrpc::newBlockingStub);
}
public record FaviconData(byte[] bytes, String contentType) {}
public Optional<FaviconData> getFavicon(String domain, int node) {
RpcFaviconResponse rsp = channelPool.call(FaviconAPIGrpc.FaviconAPIBlockingStub::getFavicon)
.forNode(node)
.run(RpcFaviconRequest.newBuilder().setDomain(domain).build());
if (rsp.getData().isEmpty())
return Optional.empty();
return Optional.of(new FaviconData(rsp.getData().toByteArray(), rsp.getContentType()));
}
}
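
FaviconClient above wraps a blocking gRPC stub behind the multi-node channel pool; callers pick the node explicitly, since favicon records live in each node's crawler-side DomainStateDb. A minimal, hypothetical call site (the injected client and the node id are assumed to come from the surrounding service):

    // Hypothetical call site; faviconClient is assumed to be injected via Guice,
    // and node is the nodeAffinity resolved via DbDomainQueries.getDomainIdWithNode().
    Optional<FaviconClient.FaviconData> icon = faviconClient.getFavicon("www.example.com", node);
    if (icon.isPresent()) {
        byte[] image = icon.get().bytes();              // raw favicon bytes
        String contentType = icon.get().contentType();  // e.g. image/png
        // serve the image with the stored content type
    }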

View File

@@ -0,0 +1,20 @@
syntax="proto3";
package marginalia.api.favicon;
option java_package="nu.marginalia.api.favicon";
option java_multiple_files=true;
service FaviconAPI {
/** Fetches information about a domain. */
rpc getFavicon(RpcFaviconRequest) returns (RpcFaviconResponse) {}
}
message RpcFaviconRequest {
string domain = 1;
}
message RpcFaviconResponse {
string domain = 1;
bytes data = 2;
string contentType = 3;
}

View File

@@ -0,0 +1,49 @@
plugins {
id 'java'
id 'application'
id 'jvm-test-suite'
}
java {
toolchain {
languageVersion.set(JavaLanguageVersion.of(rootProject.ext.jvmVersion))
}
}
apply from: "$rootProject.projectDir/srcsets.gradle"
dependencies {
implementation project(':code:common:config')
implementation project(':code:common:service')
implementation project(':code:common:model')
implementation project(':code:common:db')
implementation project(':code:functions:favicon:api')
implementation project(':code:processes:crawling-process')
implementation libs.bundles.slf4j
implementation libs.prometheus
implementation libs.guava
libs.bundles.grpc.get().each {
implementation dependencies.create(it) {
exclude group: 'com.google.guava'
}
}
implementation libs.notnull
implementation libs.guava
implementation dependencies.create(libs.guice.get()) {
exclude group: 'com.google.guava'
}
implementation dependencies.create(libs.spark.get()) {
exclude group: 'org.eclipse.jetty'
}
testImplementation libs.bundles.slf4j.test
testImplementation libs.bundles.junit
testImplementation libs.mockito
}

View File

@@ -0,0 +1,44 @@
package nu.marginalia.functions.favicon;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.protobuf.ByteString;
import io.grpc.stub.StreamObserver;
import nu.marginalia.api.favicon.FaviconAPIGrpc;
import nu.marginalia.api.favicon.RpcFaviconRequest;
import nu.marginalia.api.favicon.RpcFaviconResponse;
import nu.marginalia.crawl.DomainStateDb;
import nu.marginalia.service.server.DiscoverableService;
import java.util.Optional;
@Singleton
public class FaviconGrpcService extends FaviconAPIGrpc.FaviconAPIImplBase implements DiscoverableService {
private final DomainStateDb domainStateDb;
@Inject
public FaviconGrpcService(DomainStateDb domainStateDb) {
this.domainStateDb = domainStateDb;
}
@Override
public void getFavicon(RpcFaviconRequest request, StreamObserver<RpcFaviconResponse> responseObserver) {
Optional<DomainStateDb.FaviconRecord> icon = domainStateDb.getIcon(request.getDomain());
RpcFaviconResponse response;
if (icon.isEmpty()) {
response = RpcFaviconResponse.newBuilder().build();
}
else {
var iconRecord = icon.get();
response = RpcFaviconResponse.newBuilder()
.setContentType(iconRecord.contentType())
.setDomain(request.getDomain())
.setData(ByteString.copyFrom(iconRecord.imageData()))
.build();
}
responseObserver.onNext(response);
responseObserver.onCompleted();
}
}

View File

@@ -11,7 +11,6 @@ import nu.marginalia.slop.column.primitive.IntColumn;
 import nu.marginalia.slop.column.primitive.LongColumn;
 import nu.marginalia.slop.column.string.EnumColumn;
 import nu.marginalia.slop.column.string.StringColumn;
-import nu.marginalia.slop.column.string.TxtStringColumn;
 import nu.marginalia.slop.desc.StorageType;
 import org.jetbrains.annotations.Nullable;
@@ -182,8 +181,8 @@ public record SlopDocumentRecord(
     }
     // Basic information
-    private static final TxtStringColumn domainsColumn = new TxtStringColumn("domain", StandardCharsets.UTF_8, StorageType.GZIP);
-    private static final TxtStringColumn urlsColumn = new TxtStringColumn("url", StandardCharsets.UTF_8, StorageType.GZIP);
+    private static final StringColumn domainsColumn = new StringColumn("domain", StandardCharsets.UTF_8, StorageType.GZIP);
+    private static final StringColumn urlsColumn = new StringColumn("url", StandardCharsets.UTF_8, StorageType.GZIP);
     private static final VarintColumn ordinalsColumn = new VarintColumn("ordinal", StorageType.PLAIN);
     private static final EnumColumn statesColumn = new EnumColumn("state", StandardCharsets.US_ASCII, StorageType.PLAIN);
     private static final StringColumn stateReasonsColumn = new StringColumn("stateReason", StandardCharsets.US_ASCII, StorageType.GZIP);
@@ -211,7 +210,7 @@ public record SlopDocumentRecord(
     private static final VarintCodedSequenceArrayColumn spansColumn = new VarintCodedSequenceArrayColumn("spans", StorageType.ZSTD);
     public static class KeywordsProjectionReader extends SlopTable {
-        private final TxtStringColumn.Reader domainsReader;
+        private final StringColumn.Reader domainsReader;
         private final VarintColumn.Reader ordinalsReader;
         private final IntColumn.Reader htmlFeaturesReader;
         private final LongColumn.Reader domainMetadataReader;
@@ -275,8 +274,8 @@ public record SlopDocumentRecord(
     }
     public static class MetadataReader extends SlopTable {
-        private final TxtStringColumn.Reader domainsReader;
-        private final TxtStringColumn.Reader urlsReader;
+        private final StringColumn.Reader domainsReader;
+        private final StringColumn.Reader urlsReader;
         private final VarintColumn.Reader ordinalsReader;
         private final StringColumn.Reader titlesReader;
         private final StringColumn.Reader descriptionsReader;
@@ -332,8 +331,8 @@ public record SlopDocumentRecord(
     }
     public static class Writer extends SlopTable {
-        private final TxtStringColumn.Writer domainsWriter;
-        private final TxtStringColumn.Writer urlsWriter;
+        private final StringColumn.Writer domainsWriter;
+        private final StringColumn.Writer urlsWriter;
         private final VarintColumn.Writer ordinalsWriter;
         private final EnumColumn.Writer statesWriter;
         private final StringColumn.Writer stateReasonsWriter;

View File

@@ -248,9 +248,14 @@ public class CrawlerMain extends ProcessMainClass {
             // (this happens when the process is restarted after a crash or a shutdown)
             tasksDone.set(workLog.countFinishedJobs());
-            // Create crawl tasks and submit them to the pool for execution
+            // List of deferred tasks used to ensure beneficial scheduling of domains with regard to DomainLocks,
+            // merely shuffling the domains tends to lead to a lot of threads being blocked waiting for a semphore,
+            // this will more aggressively attempt to schedule the jobs to avoid blocking
+            List<CrawlTask> taskList = new ArrayList<>();
+
+            // Create crawl tasks
             for (CrawlSpecRecord crawlSpec : crawlSpecRecords) {
-                if (workLog.isJobFinished(crawlSpec.domain()))
+                if (workLog.isJobFinished(crawlSpec.domain))
                     continue;
                 var task = new CrawlTask(
@@ -261,11 +266,22 @@ public class CrawlerMain extends ProcessMainClass {
                         domainStateDb,
                         workLog);
-                if (pendingCrawlTasks.putIfAbsent(crawlSpec.domain(), task) == null) {
-                    pool.submitQuietly(task);
+                // Try to run immediately, to avoid unnecessarily keeping the entire work set in RAM
+                if (!trySubmitDeferredTask(task)) {
+                    // Otherwise add to the taskList for deferred execution
+                    taskList.add(task);
                 }
             }
+            // Schedule viable tasks for execution until list is empty
+            while (!taskList.isEmpty()) {
+                taskList.removeIf(this::trySubmitDeferredTask);
+
+                // Add a small pause here to avoid busy looping toward the end of the execution cycle when
+                // we might have no new viable tasks to run for hours on end
+                TimeUnit.MILLISECONDS.sleep(50);
+            }
+
             logger.info("Shutting down the pool, waiting for tasks to complete...");
             pool.shutDown();
@@ -290,6 +306,28 @@ public class CrawlerMain extends ProcessMainClass {
         }
     }
+    /** Submit a task for execution if it can be run, returns true if it was submitted
+     *  or if it can be discarded */
+    private boolean trySubmitDeferredTask(CrawlTask task) {
+        if (!task.canRun()) {
+            return false;
+        }
+
+        if (pendingCrawlTasks.putIfAbsent(task.domain, task) != null) {
+            return true; // task has already run, duplicate in crawl specs
+        }
+
+        try {
+            // This blocks the caller when the pool is full
+            pool.submitQuietly(task);
+            return true;
+        }
+        catch (RuntimeException ex) {
+            logger.error("Failed to submit task " + task.domain, ex);
+            return false;
+        }
+    }
+
     public void runForSingleDomain(String targetDomainName, FileStorageId fileStorageId) throws Exception {
         runForSingleDomain(targetDomainName, fileStorageService.getStorage(fileStorageId).asPath());
     }
@@ -346,9 +384,20 @@ public class CrawlerMain extends ProcessMainClass {
             this.id = Integer.toHexString(domain.hashCode());
         }
+        /** Best effort indicator whether we could start this now without getting stuck in
+         *  DomainLocks purgatory */
+        public boolean canRun() {
+            return domainLocks.canLock(new EdgeDomain(domain));
+        }
+
         @Override
         public void run() throws Exception {
+            if (workLog.isJobFinished(domain)) { // No-Op
+                logger.info("Omitting task {}, as it is already run", domain);
+                return;
+            }
+
             Path newWarcFile = CrawlerOutputFile.createWarcPath(outputDir, id, domain, CrawlerOutputFile.WarcFileVersion.LIVE);
             Path tempFile = CrawlerOutputFile.createWarcPath(outputDir, id, domain, CrawlerOutputFile.WarcFileVersion.TEMP);
             Path slopFile = CrawlerOutputFile.createSlopPath(outputDir, id, domain);
@@ -403,7 +452,7 @@ public class CrawlerMain extends ProcessMainClass {
                 logger.error("Error fetching domain " + domain, e);
             }
             finally {
-                // We don't need to double-count these; it's also kept int he workLog
+                // We don't need to double-count these; it's also kept in the workLog
                 pendingCrawlTasks.remove(domain);
                 Thread.currentThread().setName("[idle]");
@@ -494,7 +543,7 @@ public class CrawlerMain extends ProcessMainClass {
         //
         // This must be synchronized as chewing through parquet files in parallel leads to enormous memory overhead
         private synchronized Path migrateParquetData(Path inputPath, String domain, Path crawlDataRoot) throws IOException {
-            if (!inputPath.endsWith(".parquet")) {
+            if (!inputPath.toString().endsWith(".parquet")) {
                 return inputPath;
             }

View File

@@ -1,5 +1,8 @@
 package nu.marginalia.crawl;
+import com.google.inject.Inject;
+import nu.marginalia.storage.FileStorageService;
+import nu.marginalia.storage.model.FileStorageType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,6 +66,23 @@ public class DomainStateDb implements AutoCloseable {
     public record FaviconRecord(String contentType, byte[] imageData) {}
+    @Inject
+    public DomainStateDb(FileStorageService fileStorageService) throws SQLException {
+        this(findFilename(fileStorageService));
+    }
+
+    private static Path findFilename(FileStorageService fileStorageService) throws SQLException {
+        var fsId = fileStorageService.getOnlyActiveFileStorage(FileStorageType.CRAWL_DATA);
+        if (fsId.isPresent()) {
+            var fs = fileStorageService.getStorage(fsId.get());
+            return fs.asPath().resolve("domainstate.db");
+        }
+        else {
+            throw new SQLException("Could not find crawl data storage");
+        }
+    }
+
     public DomainStateDb(Path filename) throws SQLException {
         String sqliteDbString = "jdbc:sqlite:" + filename.toString();
         connection = DriverManager.getConnection(sqliteDbString);

View File

@@ -251,6 +251,7 @@ public class HttpFetcherImpl implements HttpFetcher {
         return new SitemapRetriever();
     }
+    /** Recursively fetch sitemaps */
     @Override
     public List<EdgeUrl> fetchSitemapUrls(String root, CrawlDelayTimer delayTimer) {
         try {
@@ -270,7 +271,7 @@ public class HttpFetcherImpl implements HttpFetcher {
             while (!sitemapQueue.isEmpty() && ret.size() < 20_000 && ++fetchedSitemaps < 10) {
                 var head = sitemapQueue.removeFirst();
-                switch (fetchSitemap(head)) {
+                switch (fetchSingleSitemap(head)) {
                     case SitemapResult.SitemapUrls(List<String> urls) -> {
                         for (var url : urls) {
@@ -306,7 +307,7 @@ public class HttpFetcherImpl implements HttpFetcher {
     }
-    private SitemapResult fetchSitemap(EdgeUrl sitemapUrl) throws URISyntaxException, IOException, InterruptedException {
+    private SitemapResult fetchSingleSitemap(EdgeUrl sitemapUrl) throws URISyntaxException, IOException, InterruptedException {
         HttpRequest getRequest = HttpRequest.newBuilder()
                 .GET()
                 .uri(sitemapUrl.asURI())

View File

@@ -44,6 +44,14 @@ public class DomainLocks {
         return new Semaphore(2);
     }
+    public boolean canLock(EdgeDomain domain) {
+        Semaphore sem = locks.get(domain.topDomain.toLowerCase());
+        if (null == sem)
+            return true;
+        else
+            return sem.availablePermits() > 0;
+    }
+
     public static class DomainLock implements AutoCloseable {
         private final String domainName;
         private final Semaphore semaphore;
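
Note that canLock() is a best-effort hint rather than a reservation: permits can be taken between the check and the later lock acquisition, so schedulers still need to be prepared to defer or block. A small, hypothetical sketch of the check-then-defer pattern it enables:

    // Hypothetical scheduler snippet; pool, deferredTasks and domainLocks are assumed to exist.
    if (domainLocks.canLock(new EdgeDomain(task.domain))) {
        pool.submitQuietly(task);   // likely to start promptly
    }
    else {
        deferredTasks.add(task);    // retry later instead of tying up a worker thread
    }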

View File

@@ -42,18 +42,20 @@ public interface SerializableCrawlDataStream extends AutoCloseable {
     {
         String fileName = fullPath.getFileName().toString();
-        if (fileName.endsWith(".parquet")) {
+        if (fileName.endsWith(".slop.zip")) {
             try {
-                return new ParquetSerializableCrawlDataStream(fullPath);
+                return new SlopSerializableCrawlDataStream(fullPath);
             } catch (Exception ex) {
                 logger.error("Error reading domain data from " + fullPath, ex);
                 return SerializableCrawlDataStream.empty();
             }
         }
-        if (fileName.endsWith(".slop.zip")) {
+        else if (fileName.endsWith(".parquet")) {
+            logger.error("Opening deprecated parquet-style crawl data stream", new Exception());
             try {
-                return new SlopSerializableCrawlDataStream(fullPath);
+                return new ParquetSerializableCrawlDataStream(fullPath);
             } catch (Exception ex) {
                 logger.error("Error reading domain data from " + fullPath, ex);
                 return SerializableCrawlDataStream.empty();

View File

@@ -7,8 +7,7 @@ import java.util.Arrays;
 public enum SearchJsParameter {
     DEFAULT("default"),
-    DENY_JS("no-js", "js:true"),
-    REQUIRE_JS("yes-js", "js:false");
+    DENY_JS("no-js", "special:scripts");
     public final String value;
     public final String[] implictExcludeSearchTerms;
@@ -20,7 +19,6 @@ public enum SearchJsParameter {
     public static SearchJsParameter parse(@Nullable String value) {
         if (DENY_JS.value.equals(value)) return DENY_JS;
-        if (REQUIRE_JS.value.equals(value)) return REQUIRE_JS;
         return DEFAULT;
     }

View File

@@ -41,6 +41,7 @@ dependencies {
     implementation project(':code:functions:live-capture:api')
     implementation project(':code:functions:math:api')
+    implementation project(':code:functions:favicon:api')
     implementation project(':code:functions:domain-info:api')
     implementation project(':code:functions:search-query:api')

View File

@@ -3,10 +3,14 @@ package nu.marginalia.search;
 import com.google.inject.Inject;
 import io.jooby.Context;
 import io.jooby.Jooby;
+import io.jooby.MediaType;
 import io.jooby.StatusCode;
 import io.prometheus.client.Counter;
 import io.prometheus.client.Histogram;
 import nu.marginalia.WebsiteUrl;
+import nu.marginalia.api.favicon.FaviconClient;
+import nu.marginalia.db.DbDomainQueries;
+import nu.marginalia.model.EdgeDomain;
 import nu.marginalia.search.svc.*;
 import nu.marginalia.service.discovery.property.ServicePartition;
 import nu.marginalia.service.server.BaseServiceParams;
@@ -15,11 +19,14 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.util.List;
+import java.util.NoSuchElementException;
 public class SearchService extends JoobyService {
     private final WebsiteUrl websiteUrl;
     private final SearchSiteSubscriptionService siteSubscriptionService;
+    private final FaviconClient faviconClient;
+    private final DbDomainQueries domainQueries;
     private static final Logger logger = LoggerFactory.getLogger(SearchService.class);
     private static final Histogram wmsa_search_service_request_time = Histogram.build()
@@ -43,6 +50,8 @@ public class SearchService extends JoobyService {
                          SearchSiteInfoService siteInfoService,
                          SearchCrosstalkService crosstalkService,
                          SearchBrowseService searchBrowseService,
+                         FaviconClient faviconClient,
+                         DbDomainQueries domainQueries,
                          SearchQueryService searchQueryService)
             throws Exception {
         super(params,
@@ -58,6 +67,8 @@ public class SearchService extends JoobyService {
         this.websiteUrl = websiteUrl;
         this.siteSubscriptionService = siteSubscriptionService;
+        this.faviconClient = faviconClient;
+        this.domainQueries = domainQueries;
     }
     @Override
@@ -71,6 +82,31 @@ public class SearchService extends JoobyService {
         jooby.get("/site/https://*", this::handleSiteUrlRedirect);
         jooby.get("/site/http://*", this::handleSiteUrlRedirect);
+        jooby.get("/site/{domain}/favicon", ctx -> {
+            String domain = ctx.path("domain").value();
+            logger.info("Finding icon for domain {}", domain);
+            domainQueries.getDomainId(new EdgeDomain(domain));
+            try {
+                DbDomainQueries.DomainIdWithNode domainIdWithNode = domainQueries.getDomainIdWithNode(new EdgeDomain(domain));
+                var faviconMaybe = faviconClient.getFavicon(domain, domainIdWithNode.nodeAffinity());
+                if (faviconMaybe.isEmpty()) {
+                    ctx.setResponseCode(404);
+                    return "";
+                } else {
+                    var favicon = faviconMaybe.get();
+                    ctx.responseStream(MediaType.valueOf(favicon.contentType()), consumer -> {
+                        consumer.write(favicon.bytes());
+                    });
+                }
+            }
+            catch (NoSuchElementException ex) {
+                ctx.setResponseCode(404);
+            }
+            return "";
+        });
+
         jooby.before((Context ctx) -> {
             ctx.setAttribute(startTimeAttribute, System.nanoTime());
         });

View File

@@ -7,9 +7,7 @@ import java.util.Arrays;
 public enum SearchJsParameter {
     DEFAULT("default"),
-    DENY_JS("no-js", "js:true"),
-    REQUIRE_JS("yes-js", "js:false");
+    DENY_JS("no-js", "special:scripts");
     public final String value;
     public final String[] implictExcludeSearchTerms;
@@ -20,7 +18,6 @@ public enum SearchJsParameter {
     public static SearchJsParameter parse(@Nullable String value) {
         if (DENY_JS.value.equals(value)) return DENY_JS;
-        if (REQUIRE_JS.value.equals(value)) return REQUIRE_JS;
         return DEFAULT;
     }

View File

@@ -81,6 +81,7 @@ public class SearchFilters {
                 ),
                 List.of(
                     new Filter("Vintage", "fa-clock-rotate-left", SearchProfile.VINTAGE, parameters),
+                    new Filter("Small Web", "fa-minus", SearchProfile.SMALLWEB, parameters),
                     new Filter("Plain Text", "fa-file", SearchProfile.PLAIN_TEXT, parameters),
                     new Filter("Tilde", "fa-house", SearchProfile.TILDE, parameters)
                 ),

View File

@@ -9,6 +9,14 @@
             nicotine: '#f8f8ee',
             margeblue: '#3e5f6f',
             liteblue: '#0066cc',
+        },
+        screens: {
+            'coarsepointer': {
+                'raw': '(pointer: coarse)'
+            },
+            'finepointer': {
+                'raw': '(pointer: fine)'
+            },
         }
     },
     screens: {

View File

@@ -23,7 +23,7 @@
     @template.serp.part.searchform(query = results.getParams().query(), profile = results.getProfile(), filters = results.getFilters())
 </div>
 <div class="grow"></div>
-<button class="fixed bottom-10 right-5 sm:hidden text-sm bg-margeblue text-white p-4 rounded-xl active:text-slate-200" id="filter-button">
+<button class="fixed bottom-10 right-5 finepointer:hidden md:hidden text-sm bg-margeblue text-white p-4 rounded-xl active:text-slate-200" id="filter-button">
     <i class="fas fa-filter mr-3"></i>
     Filters
 </button>

View File

@@ -3,7 +3,7 @@
 @param SearchFilters filters
-<aside class="md:w-64 py-4 shrink-0 hidden sm:block">
+<aside class="md:w-64 py-4 shrink-0 hidden md:block finepointer:block">
     <div class="space-y-6 sticky top-4">
         <div class="bg-white dark:bg-gray-800 p-4 border dark:border-gray-600 border-gray-300">
             <h2 class="font-medium mb-3 flex items-center font-serif hidden md:block">

View File

@@ -9,8 +9,8 @@
 <div class="flex-1 p-4 space-y-4 mx-auto w-full md:w-auto">
     <div class="flex border dark:border-gray-600 rounded bg-white dark:bg-gray-800 flex-col space-y-4 pb-4 overflow-hidden md:max-w-lg" >
-        <div class="flex place-items-baseline space-x-2 p-2 text-md border-b dark:border-gray-600 bg-margeblue text-white">
-            <i class="fa fa-globe"></i>
+        <div class="flex place-items-center space-x-2 p-2 text-md border-b dark:border-gray-600 bg-margeblue text-white">
+            <img src="/site/${siteInfo.domain()}/favicon" style="width: 16px; height: 16px; vertical-align: center">
             <span>${siteInfo.domain()}</span>
             <div class="grow">
             </div>

View File

@@ -9,6 +9,14 @@ module.exports = {
             nicotine: '#f8f8ee',
             margeblue: '#3e5f6f',
             liteblue: '#0066cc',
+        },
+        screens: {
+            'coarsepointer': {
+                'raw': '(pointer: coarse)'
+            },
+            'finepointer': {
+                'raw': '(pointer: fine)'
+            },
         }
     },
     screens: {

View File

@@ -42,6 +42,8 @@ dependencies {
     implementation project(':code:libraries:message-queue')
     implementation project(':code:functions:link-graph:api')
+    implementation project(':code:functions:favicon')
+    implementation project(':code:functions:favicon:api')
     implementation project(':code:processes:crawling-process:model')
     implementation project(':code:processes:crawling-process:model')

View File

@@ -2,6 +2,7 @@ package nu.marginalia.executor;
 import com.google.inject.Inject;
 import nu.marginalia.execution.*;
+import nu.marginalia.functions.favicon.FaviconGrpcService;
 import nu.marginalia.service.discovery.property.ServicePartition;
 import nu.marginalia.service.server.BaseServiceParams;
 import nu.marginalia.service.server.SparkService;
@@ -24,6 +25,7 @@ public class ExecutorSvc extends SparkService {
                        ExecutorCrawlGrpcService executorCrawlGrpcService,
                        ExecutorSideloadGrpcService executorSideloadGrpcService,
                        ExecutorExportGrpcService executorExportGrpcService,
+                       FaviconGrpcService faviconGrpcService,
                        ExecutionInit executionInit,
                        ExecutorFileTransferService fileTransferService) throws Exception {
         super(params,
@@ -31,7 +33,8 @@ public class ExecutorSvc extends SparkService {
               List.of(executorGrpcService,
                       executorCrawlGrpcService,
                       executorSideloadGrpcService,
-                      executorExportGrpcService)
+                      executorExportGrpcService,
+                      faviconGrpcService)
               );
         this.executionInit = executionInit;

View File

@@ -16,7 +16,8 @@ include 'code:services-application:status-service'
 include 'code:functions:math'
 include 'code:functions:math:api'
+include 'code:functions:favicon'
+include 'code:functions:favicon:api'
 include 'code:functions:domain-info'
 include 'code:functions:domain-info:api'