mirror of
https://github.com/MarginaliaSearch/MarginaliaSearch.git
synced 2025-10-06 07:32:38 +02:00
Compare commits
118 Commits
deploy-002
...
deploy-005
Author | SHA1 | Date | |
---|---|---|---|
|
55aeb03c4a | ||
|
faa589962f | ||
|
c7edd6b39f | ||
|
79da622e3b | ||
|
3da8337ba6 | ||
|
a32d230f0a | ||
|
3772bfd387 | ||
|
02a7900d1a | ||
|
a1fb92468f | ||
|
b7f0a2a98e | ||
|
5fb76b2e79 | ||
|
ad8c97f342 | ||
|
dc1b6373eb | ||
|
983d6d067c | ||
|
a84a06975c | ||
|
d2864c13ec | ||
|
03ba53ce51 | ||
|
d4a6684931 | ||
|
6f0485287a | ||
|
59e2dd4c26 | ||
|
ca1807caae | ||
|
26c20e18ac | ||
|
7c90b6b414 | ||
|
b63c54c4ce | ||
|
fecd2f4ec3 | ||
|
39e420de88 | ||
|
dc83619861 | ||
|
87d1c89701 | ||
|
a42a7769e2 | ||
|
202bda884f | ||
|
2315fdc731 | ||
|
b5469bd8a1 | ||
|
6a6318d04c | ||
|
55933f8d40 | ||
|
be6382e0d0 | ||
|
45e771f96b | ||
|
8dde502cc9 | ||
|
3e66767af3 | ||
|
9ec9d1b338 | ||
|
dcad0d7863 | ||
|
94e1aa0baf | ||
|
b62f043910 | ||
|
6ea22d0d21 | ||
|
8c69dc31b8 | ||
|
00734ea87f | ||
|
3009713db4 | ||
|
9b2ceaf37c | ||
|
8019c2ce18 | ||
|
a9e312b8b1 | ||
|
4da3563d8a | ||
|
48d0a3089a | ||
|
06efb5abfc | ||
|
8c8f2ad5ee | ||
|
f71e79d10f | ||
|
1b27c5cf06 | ||
|
8b05c788fd | ||
|
236f033bc9 | ||
|
510fc75121 | ||
|
0376f2e6e3 | ||
|
84f55b84ff | ||
|
ab5c30ad51 | ||
|
0c839453c5 | ||
|
5e4c5d03ae | ||
|
a5b0a1ae62 | ||
|
e9f71ee39b | ||
|
81cdd6385d | ||
|
e76c42329f | ||
|
e6ef4734ea | ||
|
df4bc1d7e9 | ||
|
2b222efa75 | ||
|
6d18e6d840 | ||
|
2a3c63f209 | ||
|
9f70cecaef | ||
|
c08203e2ed | ||
|
86497fd32f | ||
|
3b998573fd | ||
|
e161882ec7 | ||
|
357f349e30 | ||
|
e4769f541d | ||
|
2a173e2861 | ||
|
a6a900266c | ||
|
bdba53f055 | ||
|
bbdde789e7 | ||
|
eab61cd48a | ||
|
0ce2ba9ad9 | ||
|
3ddcebaa36 | ||
|
b91463383e | ||
|
7444a2f36c | ||
|
fdee07048d | ||
|
2fbf201761 | ||
|
4018e4c434 | ||
|
f3382b5bd8 | ||
|
9287ee0141 | ||
|
2769c8f869 | ||
|
ddb66f33ba | ||
|
79500b8fbc | ||
|
187eea43a4 | ||
|
a89ed6fa9f | ||
|
8d168be138 | ||
|
6e1aa7b391 | ||
|
deab9b9516 | ||
|
39d99a906a | ||
|
6f72e6e0d3 | ||
|
d786d79483 | ||
|
01510f6c2e | ||
|
7ba43e9e3f | ||
|
97bfcd1353 | ||
|
aa3c85c196 | ||
|
fb75a3827d | ||
|
7d546d0e2a | ||
|
8fcb6ffd7a | ||
|
f97de0c15a | ||
|
be9e192b78 | ||
|
75ae1c9526 | ||
|
33761a0236 | ||
|
19b69b1764 | ||
|
8b804359a9 | ||
|
f050bf5c4c |
1
.github/FUNDING.yml
vendored
1
.github/FUNDING.yml
vendored
@@ -1,5 +1,6 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
polar: marginalia-search
|
||||
github: MarginaliaSearch
|
||||
patreon: marginalia_nu
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@@ -7,3 +7,4 @@ build/
|
||||
lombok.config
|
||||
Dockerfile
|
||||
run
|
||||
jte-classes
|
@@ -48,6 +48,7 @@ ext {
|
||||
dockerImageTag='latest'
|
||||
dockerImageRegistry='marginalia'
|
||||
jibVersion = '3.4.3'
|
||||
|
||||
}
|
||||
|
||||
idea {
|
||||
|
@@ -8,17 +8,18 @@ import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
import nu.marginalia.model.EdgeDomain;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Optional;
|
||||
import java.util.OptionalInt;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
@Singleton
|
||||
public class DbDomainQueries {
|
||||
private final HikariDataSource dataSource;
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DbDomainQueries.class);
|
||||
private final Cache<EdgeDomain, Integer> domainIdCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
|
||||
|
||||
@Inject
|
||||
@@ -28,7 +29,7 @@ public class DbDomainQueries {
|
||||
}
|
||||
|
||||
|
||||
public Integer getDomainId(EdgeDomain domain) {
|
||||
public Integer getDomainId(EdgeDomain domain) throws NoSuchElementException {
|
||||
try (var connection = dataSource.getConnection()) {
|
||||
|
||||
return domainIdCache.get(domain, () -> {
|
||||
@@ -42,6 +43,9 @@ public class DbDomainQueries {
|
||||
throw new NoSuchElementException();
|
||||
});
|
||||
}
|
||||
catch (UncheckedExecutionException ex) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
catch (ExecutionException ex) {
|
||||
throw new RuntimeException(ex.getCause());
|
||||
}
|
||||
@@ -98,4 +102,34 @@ public class DbDomainQueries {
|
||||
throw new RuntimeException(ex);
|
||||
}
|
||||
}
|
||||
|
||||
public List<DomainWithNode> otherSubdomains(EdgeDomain domain, int cnt) {
|
||||
List<DomainWithNode> ret = new ArrayList<>();
|
||||
|
||||
try (var conn = dataSource.getConnection();
|
||||
var stmt = conn.prepareStatement("SELECT DOMAIN_NAME, NODE_AFFINITY FROM EC_DOMAIN WHERE DOMAIN_TOP = ? LIMIT ?")) {
|
||||
stmt.setString(1, domain.topDomain);
|
||||
stmt.setInt(2, cnt);
|
||||
|
||||
var rs = stmt.executeQuery();
|
||||
while (rs.next()) {
|
||||
var sibling = new EdgeDomain(rs.getString(1));
|
||||
|
||||
if (sibling.equals(domain))
|
||||
continue;
|
||||
|
||||
ret.add(new DomainWithNode(sibling, rs.getInt(2)));
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to get domain neighbors");
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public record DomainWithNode (EdgeDomain domain, int nodeAffinity) {
|
||||
public boolean isIndexed() {
|
||||
return nodeAffinity > 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -83,6 +83,11 @@ public class QueryParams {
|
||||
if (path.endsWith("StoryView.py")) { // folklore.org is neat
|
||||
return param.startsWith("project=") || param.startsWith("story=");
|
||||
}
|
||||
|
||||
// www.perseus.tufts.edu:
|
||||
if (param.startsWith("collection=")) return true;
|
||||
if (param.startsWith("doc=")) return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@@ -42,6 +42,12 @@ dependencies {
|
||||
implementation libs.bundles.curator
|
||||
implementation libs.bundles.flyway
|
||||
|
||||
libs.bundles.jooby.get().each {
|
||||
implementation dependencies.create(it) {
|
||||
exclude group: 'org.slf4j'
|
||||
}
|
||||
}
|
||||
|
||||
testImplementation libs.bundles.slf4j.test
|
||||
implementation libs.bundles.mariadb
|
||||
|
||||
|
@@ -7,8 +7,6 @@ import nu.marginalia.service.discovery.property.PartitionTraits;
|
||||
import nu.marginalia.service.discovery.property.ServiceEndpoint;
|
||||
import nu.marginalia.service.discovery.property.ServiceKey;
|
||||
import nu.marginalia.service.discovery.property.ServicePartition;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
@@ -24,7 +22,7 @@ import java.util.function.Function;
|
||||
public class GrpcMultiNodeChannelPool<STUB> {
|
||||
private final ConcurrentHashMap<Integer, GrpcSingleNodeChannelPool<STUB>> pools =
|
||||
new ConcurrentHashMap<>();
|
||||
private static final Logger logger = LoggerFactory.getLogger(GrpcMultiNodeChannelPool.class);
|
||||
|
||||
private final ServiceRegistryIf serviceRegistryIf;
|
||||
private final ServiceKey<? extends PartitionTraits.Multicast> serviceKey;
|
||||
private final Function<ServiceEndpoint.InstanceAddress, ManagedChannel> channelConstructor;
|
||||
|
@@ -10,6 +10,8 @@ import nu.marginalia.service.discovery.property.ServiceKey;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.slf4j.Marker;
|
||||
import org.slf4j.MarkerFactory;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.*;
|
||||
@@ -26,6 +28,7 @@ import java.util.function.Function;
|
||||
public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
|
||||
private final Map<InstanceAddress, ConnectionHolder> channels = new ConcurrentHashMap<>();
|
||||
|
||||
private final Marker grpcMarker = MarkerFactory.getMarker("GRPC");
|
||||
private static final Logger logger = LoggerFactory.getLogger(GrpcSingleNodeChannelPool.class);
|
||||
|
||||
private final ServiceRegistryIf serviceRegistryIf;
|
||||
@@ -59,10 +62,10 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
|
||||
for (var route : Sets.symmetricDifference(oldRoutes, newRoutes)) {
|
||||
ConnectionHolder oldChannel;
|
||||
if (newRoutes.contains(route)) {
|
||||
logger.info("Adding route {}", route);
|
||||
logger.info(grpcMarker, "Adding route {} => {}", serviceKey, route);
|
||||
oldChannel = channels.put(route, new ConnectionHolder(route));
|
||||
} else {
|
||||
logger.info("Expelling route {}", route);
|
||||
logger.info(grpcMarker, "Expelling route {} => {}", serviceKey, route);
|
||||
oldChannel = channels.remove(route);
|
||||
}
|
||||
if (oldChannel != null) {
|
||||
@@ -100,7 +103,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info("Creating channel for {}:{}", serviceKey, address);
|
||||
logger.info(grpcMarker, "Creating channel for {} => {}", serviceKey, address);
|
||||
value = channelConstructor.apply(address);
|
||||
if (channel.compareAndSet(null, value)) {
|
||||
return value;
|
||||
@@ -111,7 +114,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
|
||||
}
|
||||
}
|
||||
catch (Exception e) {
|
||||
logger.error("Failed to get channel for " + address, e);
|
||||
logger.error(grpcMarker, "Failed to get channel for " + address, e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -203,7 +206,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
|
||||
}
|
||||
|
||||
for (var e : exceptions) {
|
||||
logger.error("Failed to call service {}", serviceKey, e);
|
||||
logger.error(grpcMarker, "Failed to call service {}", serviceKey, e);
|
||||
}
|
||||
|
||||
throw new ServiceNotAvailableException(serviceKey);
|
||||
|
@@ -4,6 +4,11 @@ import nu.marginalia.service.discovery.property.ServiceKey;
|
||||
|
||||
public class ServiceNotAvailableException extends RuntimeException {
|
||||
public ServiceNotAvailableException(ServiceKey<?> key) {
|
||||
super("Service " + key + " not available");
|
||||
super(key.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public StackTraceElement[] getStackTrace() { // Suppress stack trace
|
||||
return new StackTraceElement[0];
|
||||
}
|
||||
}
|
||||
|
@@ -48,5 +48,10 @@ public record ServiceEndpoint(String host, int port) {
|
||||
public int port() {
|
||||
return endpoint.port();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return endpoint().host() + ":" + endpoint.port() + " [" + instance + "]";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -48,6 +48,19 @@ public sealed interface ServiceKey<P extends ServicePartition> {
|
||||
{
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
final String shortName;
|
||||
|
||||
int periodIndex = name.lastIndexOf('.');
|
||||
|
||||
if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
|
||||
else shortName = name;
|
||||
|
||||
return "rest:" + shortName;
|
||||
}
|
||||
|
||||
}
|
||||
record Grpc<P extends ServicePartition>(String name, P partition) implements ServiceKey<P> {
|
||||
public String baseName() {
|
||||
@@ -64,6 +77,18 @@ public sealed interface ServiceKey<P extends ServicePartition> {
|
||||
{
|
||||
return new Grpc<>(name, partition);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
final String shortName;
|
||||
|
||||
int periodIndex = name.lastIndexOf('.');
|
||||
|
||||
if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
|
||||
else shortName = name;
|
||||
|
||||
return "grpc:" + shortName + "[" + partition.identifier() + "]";
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -0,0 +1,178 @@
|
||||
package nu.marginalia.service.server;
|
||||
|
||||
import io.jooby.*;
|
||||
import io.prometheus.client.Counter;
|
||||
import nu.marginalia.mq.inbox.MqInboxIf;
|
||||
import nu.marginalia.service.client.ServiceNotAvailableException;
|
||||
import nu.marginalia.service.discovery.property.ServiceEndpoint;
|
||||
import nu.marginalia.service.discovery.property.ServiceKey;
|
||||
import nu.marginalia.service.discovery.property.ServicePartition;
|
||||
import nu.marginalia.service.module.ServiceConfiguration;
|
||||
import nu.marginalia.service.server.jte.JteModule;
|
||||
import nu.marginalia.service.server.mq.ServiceMqSubscription;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.slf4j.Marker;
|
||||
import org.slf4j.MarkerFactory;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.List;
|
||||
|
||||
public class JoobyService {
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
|
||||
// Marker for filtering out sensitive content from the persistent logs
|
||||
private final Marker httpMarker = MarkerFactory.getMarker("HTTP");
|
||||
|
||||
private final Initialization initialization;
|
||||
|
||||
private final static Counter request_counter = Counter.build("wmsa_request_counter", "Request Counter")
|
||||
.labelNames("service", "node")
|
||||
.register();
|
||||
private final static Counter request_counter_good = Counter.build("wmsa_request_counter_good", "Good Requests")
|
||||
.labelNames("service", "node")
|
||||
.register();
|
||||
private final static Counter request_counter_bad = Counter.build("wmsa_request_counter_bad", "Bad Requests")
|
||||
.labelNames("service", "node")
|
||||
.register();
|
||||
private final static Counter request_counter_err = Counter.build("wmsa_request_counter_err", "Error Requests")
|
||||
.labelNames("service", "node")
|
||||
.register();
|
||||
private final String serviceName;
|
||||
private static volatile boolean initialized = false;
|
||||
|
||||
protected final MqInboxIf messageQueueInbox;
|
||||
private final int node;
|
||||
private GrpcServer grpcServer;
|
||||
|
||||
private ServiceConfiguration config;
|
||||
private final List<MvcExtension> joobyServices;
|
||||
private final ServiceEndpoint restEndpoint;
|
||||
|
||||
public JoobyService(BaseServiceParams params,
|
||||
ServicePartition partition,
|
||||
List<DiscoverableService> grpcServices,
|
||||
List<MvcExtension> joobyServices
|
||||
) throws Exception {
|
||||
|
||||
this.joobyServices = joobyServices;
|
||||
this.initialization = params.initialization;
|
||||
config = params.configuration;
|
||||
node = config.node();
|
||||
|
||||
String inboxName = config.serviceName();
|
||||
logger.info("Inbox name: {}", inboxName);
|
||||
|
||||
var serviceRegistry = params.serviceRegistry;
|
||||
|
||||
restEndpoint = serviceRegistry.registerService(ServiceKey.forRest(config.serviceId(), config.node()),
|
||||
config.instanceUuid(), config.externalAddress());
|
||||
|
||||
var mqInboxFactory = params.messageQueueInboxFactory;
|
||||
messageQueueInbox = mqInboxFactory.createSynchronousInbox(inboxName, config.node(), config.instanceUuid());
|
||||
messageQueueInbox.subscribe(new ServiceMqSubscription(this));
|
||||
|
||||
serviceName = System.getProperty("service-name");
|
||||
|
||||
initialization.addCallback(params.heartbeat::start);
|
||||
initialization.addCallback(messageQueueInbox::start);
|
||||
initialization.addCallback(() -> params.eventLog.logEvent("SVC-INIT", serviceName + ":" + config.node()));
|
||||
initialization.addCallback(() -> serviceRegistry.announceInstance(config.instanceUuid()));
|
||||
|
||||
Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
|
||||
if (e instanceof ServiceNotAvailableException) {
|
||||
// reduce log spam for this common case
|
||||
logger.error("Service not available: {}", e.getMessage());
|
||||
}
|
||||
else {
|
||||
logger.error("Uncaught exception", e);
|
||||
}
|
||||
request_counter_err.labels(serviceName, Integer.toString(node)).inc();
|
||||
});
|
||||
|
||||
if (!initialization.isReady() && ! initialized ) {
|
||||
initialized = true;
|
||||
grpcServer = new GrpcServer(config, serviceRegistry, partition, grpcServices);
|
||||
grpcServer.start();
|
||||
}
|
||||
}
|
||||
|
||||
public void startJooby(Jooby jooby) {
|
||||
|
||||
logger.info("{} Listening to {}:{} ({})", getClass().getSimpleName(),
|
||||
restEndpoint.host(),
|
||||
restEndpoint.port(),
|
||||
config.externalAddress());
|
||||
|
||||
// FIXME: This won't work outside of docker, may need to submit a PR to jooby to allow classpaths here
|
||||
jooby.install(new JteModule(Path.of("/app/resources/jte"), Path.of("/app/classes/jte-precompiled")));
|
||||
jooby.assets("/*", Paths.get("/app/resources/static"));
|
||||
|
||||
var options = new ServerOptions();
|
||||
options.setHost(config.bindAddress());
|
||||
options.setPort(restEndpoint.port());
|
||||
|
||||
// Enable gzip compression of response data, but set compression to the lowest level
|
||||
// since it doesn't really save much more space to dial it up. It's typically a
|
||||
// single digit percentage difference since HTML already compresses very well with level = 1.
|
||||
options.setCompressionLevel(1);
|
||||
|
||||
|
||||
jooby.setServerOptions(options);
|
||||
|
||||
jooby.get("/internal/ping", ctx -> "pong");
|
||||
jooby.get("/internal/started", this::isInitialized);
|
||||
jooby.get("/internal/ready", this::isReady);
|
||||
|
||||
for (var service : joobyServices) {
|
||||
jooby.mvc(service);
|
||||
}
|
||||
|
||||
jooby.before(this::auditRequestIn);
|
||||
jooby.after(this::auditRequestOut);
|
||||
}
|
||||
|
||||
private Object isInitialized(Context ctx) {
|
||||
if (initialization.isReady()) {
|
||||
return "ok";
|
||||
}
|
||||
else {
|
||||
ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
|
||||
return "bad";
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isReady() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private String isReady(Context ctx) {
|
||||
if (isReady()) {
|
||||
return "ok";
|
||||
}
|
||||
else {
|
||||
ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
|
||||
return "bad";
|
||||
}
|
||||
}
|
||||
|
||||
private void auditRequestIn(Context ctx) {
|
||||
request_counter.labels(serviceName, Integer.toString(node)).inc();
|
||||
}
|
||||
|
||||
private void auditRequestOut(Context ctx, Object result, Throwable failure) {
|
||||
if (ctx.getResponseCode().value() < 400) {
|
||||
request_counter_good.labels(serviceName, Integer.toString(node)).inc();
|
||||
}
|
||||
else {
|
||||
request_counter_bad.labels(serviceName, Integer.toString(node)).inc();
|
||||
}
|
||||
|
||||
if (failure != null) {
|
||||
logger.error("Request failed " + ctx.getMethod() + " " + ctx.getRequestURL(), failure);
|
||||
request_counter_err.labels(serviceName, Integer.toString(node)).inc();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -16,7 +16,7 @@ import spark.Spark;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class Service {
|
||||
public class SparkService {
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
|
||||
// Marker for filtering out sensitive content from the persistent logs
|
||||
@@ -43,7 +43,7 @@ public class Service {
|
||||
private final int node;
|
||||
private GrpcServer grpcServer;
|
||||
|
||||
public Service(BaseServiceParams params,
|
||||
public SparkService(BaseServiceParams params,
|
||||
Runnable configureStaticFiles,
|
||||
ServicePartition partition,
|
||||
List<DiscoverableService> grpcServices) throws Exception {
|
||||
@@ -126,18 +126,18 @@ public class Service {
|
||||
}
|
||||
}
|
||||
|
||||
public Service(BaseServiceParams params,
|
||||
public SparkService(BaseServiceParams params,
|
||||
ServicePartition partition,
|
||||
List<DiscoverableService> grpcServices) throws Exception {
|
||||
this(params,
|
||||
Service::defaultSparkConfig,
|
||||
SparkService::defaultSparkConfig,
|
||||
partition,
|
||||
grpcServices);
|
||||
}
|
||||
|
||||
public Service(BaseServiceParams params) throws Exception {
|
||||
public SparkService(BaseServiceParams params) throws Exception {
|
||||
this(params,
|
||||
Service::defaultSparkConfig,
|
||||
SparkService::defaultSparkConfig,
|
||||
ServicePartition.any(),
|
||||
List.of());
|
||||
}
|
@@ -0,0 +1,61 @@
|
||||
package nu.marginalia.service.server.jte;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.NonNull;
|
||||
import edu.umd.cs.findbugs.annotations.Nullable;
|
||||
import gg.jte.ContentType;
|
||||
import gg.jte.TemplateEngine;
|
||||
import gg.jte.resolve.DirectoryCodeResolver;
|
||||
import io.jooby.*;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
// Temporary workaround for a bug
|
||||
// APL-2.0 https://github.com/jooby-project/jooby
|
||||
public class JteModule implements Extension {
|
||||
private Path sourceDirectory;
|
||||
private Path classDirectory;
|
||||
private TemplateEngine templateEngine;
|
||||
|
||||
public JteModule(@NonNull Path sourceDirectory, @NonNull Path classDirectory) {
|
||||
this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
|
||||
this.classDirectory = (Path)Objects.requireNonNull(classDirectory, "Class directory is required.");
|
||||
}
|
||||
|
||||
public JteModule(@NonNull Path sourceDirectory) {
|
||||
this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
|
||||
}
|
||||
|
||||
public JteModule(@NonNull TemplateEngine templateEngine) {
|
||||
this.templateEngine = (TemplateEngine)Objects.requireNonNull(templateEngine, "Template engine is required.");
|
||||
}
|
||||
|
||||
public void install(@NonNull Jooby application) {
|
||||
if (this.templateEngine == null) {
|
||||
this.templateEngine = create(application.getEnvironment(), this.sourceDirectory, this.classDirectory);
|
||||
}
|
||||
|
||||
ServiceRegistry services = application.getServices();
|
||||
services.put(TemplateEngine.class, this.templateEngine);
|
||||
application.encoder(MediaType.html, new JteTemplateEngine(this.templateEngine));
|
||||
}
|
||||
|
||||
public static TemplateEngine create(@NonNull Environment environment, @NonNull Path sourceDirectory, @Nullable Path classDirectory) {
|
||||
boolean dev = environment.isActive("dev", new String[]{"test"});
|
||||
if (dev) {
|
||||
Objects.requireNonNull(sourceDirectory, "Source directory is required.");
|
||||
Path requiredClassDirectory = (Path)Optional.ofNullable(classDirectory).orElseGet(() -> sourceDirectory.resolve("jte-classes"));
|
||||
TemplateEngine engine = TemplateEngine.create(new DirectoryCodeResolver(sourceDirectory), requiredClassDirectory, ContentType.Html, environment.getClassLoader());
|
||||
Optional<List<String>> var10000 = Optional.ofNullable(System.getProperty("jooby.run.classpath")).map((it) -> it.split(File.pathSeparator)).map(Stream::of).map(Stream::toList);
|
||||
Objects.requireNonNull(engine);
|
||||
var10000.ifPresent(engine::setClassPath);
|
||||
return engine;
|
||||
} else {
|
||||
return classDirectory == null ? TemplateEngine.createPrecompiled(ContentType.Html) : TemplateEngine.createPrecompiled(classDirectory, ContentType.Html);
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,48 @@
|
||||
package nu.marginalia.service.server.jte;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.NonNull;
|
||||
import gg.jte.TemplateEngine;
|
||||
import io.jooby.Context;
|
||||
import io.jooby.MapModelAndView;
|
||||
import io.jooby.ModelAndView;
|
||||
import io.jooby.buffer.DataBuffer;
|
||||
import io.jooby.internal.jte.DataBufferOutput;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
// Temporary workaround for a bug
|
||||
// APL-2.0 https://github.com/jooby-project/jooby
|
||||
class JteTemplateEngine implements io.jooby.TemplateEngine {
|
||||
private final TemplateEngine jte;
|
||||
private final List<String> extensions;
|
||||
|
||||
public JteTemplateEngine(TemplateEngine jte) {
|
||||
this.jte = jte;
|
||||
this.extensions = List.of(".jte", ".kte");
|
||||
}
|
||||
|
||||
|
||||
@NonNull @Override
|
||||
public List<String> extensions() {
|
||||
return extensions;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataBuffer render(Context ctx, ModelAndView modelAndView) {
|
||||
var buffer = ctx.getBufferFactory().allocateBuffer();
|
||||
var output = new DataBufferOutput(buffer, StandardCharsets.UTF_8);
|
||||
var attributes = ctx.getAttributes();
|
||||
if (modelAndView instanceof MapModelAndView mapModelAndView) {
|
||||
var mapModel = new HashMap<String, Object>();
|
||||
mapModel.putAll(attributes);
|
||||
mapModel.putAll(mapModelAndView.getModel());
|
||||
jte.render(modelAndView.getView(), mapModel, output);
|
||||
} else {
|
||||
jte.render(modelAndView.getView(), modelAndView.getModel(), output);
|
||||
}
|
||||
|
||||
return buffer;
|
||||
}
|
||||
}
|
@@ -3,7 +3,6 @@ package nu.marginalia.service.server.mq;
|
||||
import nu.marginalia.mq.MqMessage;
|
||||
import nu.marginalia.mq.inbox.MqInboxResponse;
|
||||
import nu.marginalia.mq.inbox.MqSubscription;
|
||||
import nu.marginalia.service.server.Service;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -15,10 +14,10 @@ import java.util.Map;
|
||||
public class ServiceMqSubscription implements MqSubscription {
|
||||
private static final Logger logger = LoggerFactory.getLogger(ServiceMqSubscription.class);
|
||||
private final Map<String, Method> requests = new HashMap<>();
|
||||
private final Service service;
|
||||
private final Object service;
|
||||
|
||||
|
||||
public ServiceMqSubscription(Service service) {
|
||||
public ServiceMqSubscription(Object service) {
|
||||
this.service = service;
|
||||
|
||||
/* Wire up all methods annotated with @MqRequest and @MqNotification
|
||||
|
@@ -6,4 +6,8 @@ public record BrowseResultSet(Collection<BrowseResult> results, String focusDoma
|
||||
public BrowseResultSet(Collection<BrowseResult> results) {
|
||||
this(results, "");
|
||||
}
|
||||
|
||||
public boolean hasFocusDomain() {
|
||||
return focusDomain != null && !focusDomain.isBlank();
|
||||
}
|
||||
}
|
||||
|
@@ -38,6 +38,7 @@ public class DomainsProtobufCodec {
|
||||
sd.getIndexed(),
|
||||
sd.getActive(),
|
||||
sd.getScreenshot(),
|
||||
sd.getFeed(),
|
||||
SimilarDomain.LinkType.valueOf(sd.getLinkType().name())
|
||||
);
|
||||
}
|
||||
|
@@ -71,6 +71,23 @@ public class DomainInformation {
|
||||
return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
|
||||
}
|
||||
|
||||
public String getAsnFlag() {
|
||||
if (asnCountry == null || asnCountry.codePointCount(0, asnCountry.length()) != 2) {
|
||||
return "";
|
||||
}
|
||||
String country = asnCountry;
|
||||
|
||||
if ("UK".equals(country)) {
|
||||
country = "GB";
|
||||
}
|
||||
|
||||
int offset = 0x1F1E6;
|
||||
int asciiOffset = 0x41;
|
||||
int firstChar = Character.codePointAt(country, 0) - asciiOffset + offset;
|
||||
int secondChar = Character.codePointAt(country, 1) - asciiOffset + offset;
|
||||
return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
|
||||
}
|
||||
|
||||
public EdgeDomain getDomain() {
|
||||
return this.domain;
|
||||
}
|
||||
|
@@ -9,6 +9,7 @@ public record SimilarDomain(EdgeUrl url,
|
||||
boolean indexed,
|
||||
boolean active,
|
||||
boolean screenshot,
|
||||
boolean feed,
|
||||
LinkType linkType) {
|
||||
|
||||
public String getRankSymbols() {
|
||||
@@ -52,12 +53,12 @@ public record SimilarDomain(EdgeUrl url,
|
||||
return NONE;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
public String faIcon() {
|
||||
return switch (this) {
|
||||
case FOWARD -> "→";
|
||||
case BACKWARD -> "←";
|
||||
case BIDIRECTIONAL -> "⇆";
|
||||
case NONE -> "-";
|
||||
case FOWARD -> "fa-solid fa-arrow-right";
|
||||
case BACKWARD -> "fa-solid fa-arrow-left";
|
||||
case BIDIRECTIONAL -> "fa-solid fa-arrow-right-arrow-left";
|
||||
case NONE -> "";
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -5,6 +5,7 @@ import com.google.inject.Singleton;
|
||||
import nu.marginalia.api.livecapture.LiveCaptureApiGrpc.LiveCaptureApiBlockingStub;
|
||||
import nu.marginalia.service.client.GrpcChannelPoolFactory;
|
||||
import nu.marginalia.service.client.GrpcSingleNodeChannelPool;
|
||||
import nu.marginalia.service.client.ServiceNotAvailableException;
|
||||
import nu.marginalia.service.discovery.property.ServiceKey;
|
||||
import nu.marginalia.service.discovery.property.ServicePartition;
|
||||
import org.slf4j.Logger;
|
||||
@@ -29,6 +30,9 @@ public class LiveCaptureClient {
|
||||
channelPool.call(LiveCaptureApiBlockingStub::requestScreengrab)
|
||||
.run(RpcDomainId.newBuilder().setDomainId(domainId).build());
|
||||
}
|
||||
catch (ServiceNotAvailableException e) {
|
||||
logger.info("requestScreengrab() failed since the service is not available");
|
||||
}
|
||||
catch (Exception e) {
|
||||
logger.error("API Exception", e);
|
||||
}
|
||||
|
@@ -27,8 +27,9 @@ dependencies {
|
||||
implementation project(':code:processes:crawling-process:ft-content-type')
|
||||
|
||||
implementation libs.jsoup
|
||||
implementation libs.rssreader
|
||||
implementation project(':third-party:rssreader')
|
||||
implementation libs.opencsv
|
||||
implementation libs.slop
|
||||
implementation libs.sqlite
|
||||
implementation libs.bundles.slf4j
|
||||
implementation libs.commons.lang3
|
||||
|
@@ -15,7 +15,9 @@ import java.util.Map;
|
||||
|
||||
/** Client for local browserless.io API */
|
||||
public class BrowserlessClient implements AutoCloseable {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(BrowserlessClient.class);
|
||||
private static final String BROWSERLESS_TOKEN = System.getProperty("live-capture.browserless-token", "BROWSERLESS_TOKEN");
|
||||
|
||||
private final HttpClient httpClient = HttpClient.newBuilder()
|
||||
.version(HttpClient.Version.HTTP_1_1)
|
||||
@@ -36,7 +38,7 @@ public class BrowserlessClient implements AutoCloseable {
|
||||
);
|
||||
|
||||
var request = HttpRequest.newBuilder()
|
||||
.uri(browserlessURI.resolve("/content"))
|
||||
.uri(browserlessURI.resolve("/content?token="+BROWSERLESS_TOKEN))
|
||||
.method("POST", HttpRequest.BodyPublishers.ofString(
|
||||
gson.toJson(requestData)
|
||||
))
|
||||
@@ -63,7 +65,7 @@ public class BrowserlessClient implements AutoCloseable {
|
||||
);
|
||||
|
||||
var request = HttpRequest.newBuilder()
|
||||
.uri(browserlessURI.resolve("/screenshot"))
|
||||
.uri(browserlessURI.resolve("/screenshot?token="+BROWSERLESS_TOKEN))
|
||||
.method("POST", HttpRequest.BodyPublishers.ofString(
|
||||
gson.toJson(requestData)
|
||||
))
|
||||
|
@@ -1,6 +1,6 @@
|
||||
package nu.marginalia.rss.model;
|
||||
|
||||
import com.apptasticsoftware.rssreader.Item;
|
||||
import nu.marginalia.rss.svc.SimpleFeedParser;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import org.jsoup.Jsoup;
|
||||
@@ -18,37 +18,33 @@ public record FeedItem(String title,
|
||||
public static final int MAX_DESC_LENGTH = 255;
|
||||
public static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
|
||||
|
||||
public static FeedItem fromItem(Item item, boolean keepFragment) {
|
||||
String title = item.getTitle().orElse("");
|
||||
public static FeedItem fromItem(SimpleFeedParser.ItemData item, boolean keepFragment) {
|
||||
String title = item.title();
|
||||
String date = getItemDate(item);
|
||||
String description = getItemDescription(item);
|
||||
String url;
|
||||
|
||||
if (keepFragment || item.getLink().isEmpty()) {
|
||||
url = item.getLink().orElse("");
|
||||
if (keepFragment) {
|
||||
url = item.url();
|
||||
}
|
||||
else {
|
||||
try {
|
||||
String link = item.getLink().get();
|
||||
String link = item.url();
|
||||
var linkUri = new URI(link);
|
||||
var cleanUri = new URI(linkUri.getScheme(), linkUri.getAuthority(), linkUri.getPath(), linkUri.getQuery(), null);
|
||||
url = cleanUri.toString();
|
||||
}
|
||||
catch (Exception e) {
|
||||
// fallback to original link if we can't clean it, this is not a very important step
|
||||
url = item.getLink().get();
|
||||
url = item.url();
|
||||
}
|
||||
}
|
||||
|
||||
return new FeedItem(title, date, description, url);
|
||||
}
|
||||
|
||||
private static String getItemDescription(Item item) {
|
||||
Optional<String> description = item.getDescription();
|
||||
if (description.isEmpty())
|
||||
return "";
|
||||
|
||||
String rawDescription = description.get();
|
||||
private static String getItemDescription(SimpleFeedParser.ItemData item) {
|
||||
String rawDescription = item.description();
|
||||
if (rawDescription.indexOf('<') >= 0) {
|
||||
rawDescription = Jsoup.parseBodyFragment(rawDescription).text();
|
||||
}
|
||||
@@ -58,15 +54,18 @@ public record FeedItem(String title,
|
||||
|
||||
// e.g. http://fabiensanglard.net/rss.xml does dates like this: 1 Apr 2021 00:00:00 +0000
|
||||
private static final DateTimeFormatter extraFormatter = DateTimeFormatter.ofPattern("d MMM yyyy HH:mm:ss Z");
|
||||
private static String getItemDate(Item item) {
|
||||
private static String getItemDate(SimpleFeedParser.ItemData item) {
|
||||
Optional<ZonedDateTime> zonedDateTime = Optional.empty();
|
||||
try {
|
||||
zonedDateTime = item.getPubDateZonedDateTime();
|
||||
}
|
||||
catch (Exception e) {
|
||||
zonedDateTime = item.getPubDate()
|
||||
.map(extraFormatter::parse)
|
||||
.map(ZonedDateTime::from);
|
||||
try {
|
||||
zonedDateTime = Optional.of(ZonedDateTime.from(extraFormatter.parse(item.pubDate())));
|
||||
}
|
||||
catch (Exception e2) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
return zonedDateTime.map(date -> date.format(DATE_FORMAT)).orElse("");
|
||||
|
@@ -1,7 +1,5 @@
|
||||
package nu.marginalia.rss.svc;
|
||||
|
||||
import com.apptasticsoftware.rssreader.Item;
|
||||
import com.apptasticsoftware.rssreader.RssReader;
|
||||
import com.google.inject.Inject;
|
||||
import com.opencsv.CSVReader;
|
||||
import nu.marginalia.WmsaHome;
|
||||
@@ -20,7 +18,6 @@ import nu.marginalia.storage.FileStorageService;
|
||||
import nu.marginalia.storage.model.FileStorage;
|
||||
import nu.marginalia.storage.model.FileStorageType;
|
||||
import nu.marginalia.util.SimpleBlockingThreadPool;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -32,7 +29,6 @@ import java.net.URISyntaxException;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.SQLException;
|
||||
import java.time.*;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
@@ -48,8 +44,6 @@ public class FeedFetcherService {
|
||||
private static final int MAX_FEED_ITEMS = 10;
|
||||
private static final Logger logger = LoggerFactory.getLogger(FeedFetcherService.class);
|
||||
|
||||
private final RssReader rssReader = new RssReader();
|
||||
|
||||
private final FeedDb feedDb;
|
||||
private final FileStorageService fileStorageService;
|
||||
private final NodeConfigurationService nodeConfigurationService;
|
||||
@@ -72,17 +66,6 @@ public class FeedFetcherService {
|
||||
this.nodeConfigurationService = nodeConfigurationService;
|
||||
this.serviceHeartbeat = serviceHeartbeat;
|
||||
this.executorClient = executorClient;
|
||||
|
||||
|
||||
// Add support for some alternate date tags for atom
|
||||
rssReader.addItemExtension("issued", this::setDateFallback);
|
||||
rssReader.addItemExtension("created", this::setDateFallback);
|
||||
}
|
||||
|
||||
private void setDateFallback(Item item, String value) {
|
||||
if (item.getPubDate().isEmpty()) {
|
||||
item.setPubDate(value);
|
||||
}
|
||||
}
|
||||
|
||||
public enum UpdateMode {
|
||||
@@ -96,6 +79,7 @@ public class FeedFetcherService {
|
||||
throw new IllegalStateException("Already updating feeds, refusing to start another update");
|
||||
}
|
||||
|
||||
|
||||
try (FeedDbWriter writer = feedDb.createWriter();
|
||||
HttpClient client = HttpClient.newBuilder()
|
||||
.connectTimeout(Duration.ofSeconds(15))
|
||||
@@ -103,6 +87,7 @@ public class FeedFetcherService {
|
||||
.followRedirects(HttpClient.Redirect.NORMAL)
|
||||
.version(HttpClient.Version.HTTP_2)
|
||||
.build();
|
||||
FeedJournal feedJournal = FeedJournal.create();
|
||||
var heartbeat = serviceHeartbeat.createServiceAdHocTaskHeartbeat("Update Rss Feeds")
|
||||
) {
|
||||
updating = true;
|
||||
@@ -155,6 +140,8 @@ public class FeedFetcherService {
|
||||
case FetchResult.Success(String value, String etag) -> {
|
||||
writer.saveEtag(feed.domain(), etag);
|
||||
writer.saveFeed(parseFeed(value, feed));
|
||||
|
||||
feedJournal.record(feed.feedUrl(), value);
|
||||
}
|
||||
case FetchResult.NotModified() -> {
|
||||
writer.saveEtag(feed.domain(), ifNoneMatchTag);
|
||||
@@ -367,12 +354,7 @@ public class FeedFetcherService {
|
||||
|
||||
public FeedItems parseFeed(String feedData, FeedDefinition definition) {
|
||||
try {
|
||||
feedData = sanitizeEntities(feedData);
|
||||
|
||||
List<Item> rawItems = rssReader.read(
|
||||
// Massage the data to maximize the possibility of the flaky XML parser consuming it
|
||||
new BOMInputStream(new ByteArrayInputStream(feedData.trim().getBytes(StandardCharsets.UTF_8)), false)
|
||||
).toList();
|
||||
List<SimpleFeedParser.ItemData> rawItems = SimpleFeedParser.parse(feedData);
|
||||
|
||||
boolean keepUriFragment = rawItems.size() < 2 || areFragmentsDisparate(rawItems);
|
||||
|
||||
@@ -395,33 +377,6 @@ public class FeedFetcherService {
|
||||
}
|
||||
}
|
||||
|
||||
private static final Map<String, String> HTML_ENTITIES = Map.of(
|
||||
"»", "»",
|
||||
"«", "«",
|
||||
"—", "--",
|
||||
"–", "-",
|
||||
"’", "'",
|
||||
"‘", "'",
|
||||
""", "\"",
|
||||
" ", ""
|
||||
);
|
||||
|
||||
/** The XML parser will blow up if you insert HTML entities in the feed XML,
|
||||
* which is unfortunately relatively common. Replace them as far as is possible
|
||||
* with their corresponding characters
|
||||
*/
|
||||
static String sanitizeEntities(String feedData) {
|
||||
String result = feedData;
|
||||
for (Map.Entry<String, String> entry : HTML_ENTITIES.entrySet()) {
|
||||
result = result.replace(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
// Handle lone ampersands not part of a recognized XML entity
|
||||
result = result.replaceAll("&(?!(amp|lt|gt|apos|quot);)", "&");
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Decide whether to keep URI fragments in the feed items.
|
||||
* <p></p>
|
||||
* We keep fragments if there are multiple different fragments in the items.
|
||||
@@ -429,16 +384,16 @@ public class FeedFetcherService {
|
||||
* @param items The items to check
|
||||
* @return True if we should keep the fragments, false otherwise
|
||||
*/
|
||||
private boolean areFragmentsDisparate(List<Item> items) {
|
||||
private boolean areFragmentsDisparate(List<SimpleFeedParser.ItemData> items) {
|
||||
Set<String> seenFragments = new HashSet<>();
|
||||
|
||||
try {
|
||||
for (var item : items) {
|
||||
if (item.getLink().isEmpty()) {
|
||||
if (item.url().isBlank()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
var link = item.getLink().get();
|
||||
var link = item.url();
|
||||
if (!link.contains("#")) {
|
||||
continue;
|
||||
}
|
||||
|
@@ -0,0 +1,76 @@
|
||||
package nu.marginalia.rss.svc;
|
||||
|
||||
import nu.marginalia.WmsaHome;
|
||||
import nu.marginalia.slop.SlopTable;
|
||||
import nu.marginalia.slop.column.string.StringColumn;
|
||||
import nu.marginalia.slop.desc.StorageType;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.function.BiConsumer;
|
||||
|
||||
/** Utility for recording fetched feeds to a journal, useful in debugging feed parser issues.
|
||||
*/
|
||||
public interface FeedJournal extends AutoCloseable {
|
||||
StringColumn urlColumn = new StringColumn("url");
|
||||
StringColumn contentsColumn = new StringColumn("contents", StandardCharsets.UTF_8, StorageType.ZSTD);
|
||||
|
||||
void record(String url, String contents) throws IOException;
|
||||
void close() throws IOException;
|
||||
|
||||
|
||||
static FeedJournal create() throws IOException {
|
||||
if (Boolean.getBoolean("feedFetcher.persistJournal")) {
|
||||
Path journalPath = WmsaHome.getDataPath().resolve("feed-journal");
|
||||
if (Files.isDirectory(journalPath)) {
|
||||
FileUtils.deleteDirectory(journalPath.toFile());
|
||||
}
|
||||
Files.createDirectories(journalPath);
|
||||
return new RecordingFeedJournal(journalPath);
|
||||
}
|
||||
else {
|
||||
return new NoOpFeedJournal();
|
||||
}
|
||||
}
|
||||
|
||||
class NoOpFeedJournal implements FeedJournal {
|
||||
@Override
|
||||
public void record(String url, String contents) {}
|
||||
|
||||
@Override
|
||||
public void close() {}
|
||||
}
|
||||
|
||||
class RecordingFeedJournal extends SlopTable implements FeedJournal {
|
||||
|
||||
private final StringColumn.Writer urlWriter;
|
||||
private final StringColumn.Writer contentsWriter;
|
||||
|
||||
public RecordingFeedJournal(Path path) throws IOException {
|
||||
super(path, SlopTable.getNumPages(path, FeedJournal.urlColumn));
|
||||
|
||||
urlWriter = urlColumn.create(this);
|
||||
contentsWriter = contentsColumn.create(this);
|
||||
}
|
||||
|
||||
public synchronized void record(String url, String contents) throws IOException {
|
||||
urlWriter.put(url);
|
||||
contentsWriter.put(contents);
|
||||
}
|
||||
}
|
||||
|
||||
static void replay(Path journalPath, BiConsumer<String, String> urlAndContent) throws IOException {
|
||||
try (SlopTable table = new SlopTable(journalPath)) {
|
||||
final StringColumn.Reader urlReader = urlColumn.open(table);
|
||||
final StringColumn.Reader contentsReader = contentsColumn.open(table);
|
||||
|
||||
while (urlReader.hasRemaining()) {
|
||||
urlAndContent.accept(urlReader.get(), contentsReader.get());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
@@ -0,0 +1,94 @@
|
||||
package nu.marginalia.rss.svc;
|
||||
|
||||
import com.apptasticsoftware.rssreader.DateTimeParser;
|
||||
import com.apptasticsoftware.rssreader.util.Default;
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.parser.Parser;
|
||||
|
||||
import java.time.ZonedDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
public class SimpleFeedParser {
|
||||
|
||||
private static final DateTimeParser dateTimeParser = Default.getDateTimeParser();
|
||||
|
||||
public record ItemData (
|
||||
String title,
|
||||
String description,
|
||||
String url,
|
||||
String pubDate
|
||||
) {
|
||||
public boolean isWellFormed() {
|
||||
return title != null && !title.isBlank() &&
|
||||
description != null && !description.isBlank() &&
|
||||
url != null && !url.isBlank() &&
|
||||
pubDate != null && !pubDate.isBlank();
|
||||
}
|
||||
|
||||
public Optional<ZonedDateTime> getPubDateZonedDateTime() {
|
||||
try {
|
||||
return Optional.ofNullable(dateTimeParser.parse(pubDate()));
|
||||
}
|
||||
catch (Exception e) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static List<ItemData> parse(String content) {
|
||||
var doc = Jsoup.parse(content, Parser.xmlParser());
|
||||
List<ItemData> ret = new ArrayList<>();
|
||||
|
||||
doc.select("item, entry").forEach(element -> {
|
||||
String link = "";
|
||||
String title = "";
|
||||
String description = "";
|
||||
String pubDate = "";
|
||||
|
||||
for (String attr : List.of("title", "dc:title")) {
|
||||
if (!title.isBlank())
|
||||
break;
|
||||
var tag = element.getElementsByTag(attr).first();
|
||||
if (tag != null) {
|
||||
title = tag.text();
|
||||
}
|
||||
}
|
||||
|
||||
for (String attr : List.of("title", "summary", "content", "description", "dc:description")) {
|
||||
if (!description.isBlank())
|
||||
break;
|
||||
var tag = element.getElementsByTag(attr).first();
|
||||
if (tag != null) {
|
||||
description = tag.text();
|
||||
}
|
||||
}
|
||||
|
||||
for (String attr : List.of("pubDate", "published", "updated", "issued", "created", "dc:date")) {
|
||||
if (!pubDate.isBlank())
|
||||
break;
|
||||
var tag = element.getElementsByTag(attr).first();
|
||||
if (tag != null) {
|
||||
pubDate = tag.text();
|
||||
}
|
||||
}
|
||||
|
||||
for (String attr : List.of("link", "url")) {
|
||||
if (!link.isBlank())
|
||||
break;
|
||||
var tag = element.getElementsByTag(attr).first();
|
||||
if (tag != null) {
|
||||
link = tag.text();
|
||||
}
|
||||
}
|
||||
|
||||
ret.add(new ItemData(title, description, link, pubDate));
|
||||
});
|
||||
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
}
|
27
code/functions/live-capture/test-resources/nlnet.atom
Normal file
27
code/functions/live-capture/test-resources/nlnet.atom
Normal file
@@ -0,0 +1,27 @@
|
||||
<feed xmlns="http://www.w3.org/2005/Atom" xml:base="https://nlnet.nl">
|
||||
<title type="text">NLnet news</title>
|
||||
<updated>2025-01-01T00:00:00Z</updated>
|
||||
<id>https://nlnet.nl/feed.atom</id>
|
||||
<link rel="self" type="application/atom+xml" href="https://nlnet.nl/feed.atom"/>
|
||||
<entry>
|
||||
<id>https://nlnet.nl/news/2025/20250101-announcing-grantees-June-call.html</id>
|
||||
<author>
|
||||
<name>NLnet</name>
|
||||
</author>
|
||||
<title type="xhtml">
|
||||
<div xmlns="http://www.w3.org/1999/xhtml">50 Free and Open Source Projects Selected for NGI Zero grants</div>
|
||||
</title>
|
||||
<link href="/news/2025/20250101-announcing-grantees-June-call.html"/>
|
||||
<updated>2025-01-01T00:00:00Z</updated>
|
||||
<content type="xhtml">
|
||||
<div xmlns="http://www.w3.org/1999/xhtml">
|
||||
<p class="paralead">Happy 2025 everyone! On this first day of the fresh new year we are happy to announce 50 project teams were selected to receive NGI Zero grants. We are welcoming projects from 18 countries involving people and organisations of various types: individuals, associations, small and medium enterprises, foundations, universities, and informal collectives. The new projects are all across the different layers of the NGI technology stack: from trustworthy open hardware to services & applications which provide autonomy for end-users.</p>
|
||||
<p>The 50 free and open source projects were selected across two funds. 19 teams will receive grants from the <a href="/commonsfund/">NGI Zero Commons Fund</a>, a broadly themed fund that supports people working on reclaiming the public nature of the internet. The other 31 projects will work within <a href="/core/">NGI Zero Core</a> which focuses on strengthening the open internet architecture. Both funds offer financial and practical support. The latter consisting of <a href="/NGI0/services/">support services</a> such as accessibility and security audits, advice on license compliance, help with testing, documentation or UX design.</p>
|
||||
<h2>If you applied for a grant</h2>
|
||||
<p>This is the selection for the <a href="https://nlnet.nl/news/2024/20240401-call.html">June call</a>. We always inform <em>all</em> applicants about the outcome of the review ahead of the public announcement, if the are selected or not. If you have not heard anything, you probably applied to a later call that is still under review. You can see which call you applied to by checking the application number assigned to the project when you applied. The second number in the sequence refers to the month of the call, so 06 in the case of the June call. (It should not happen, but if you did apply to the June call and did not hear anything, do contact us.)</p>
|
||||
<h2>Meet the new projects!</h2>
|
||||
</div>
|
||||
</content>
|
||||
</entry>
|
||||
|
||||
</feed>
|
@@ -2,16 +2,21 @@ package nu.marginalia.livecapture;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.BeforeAll;
|
||||
import org.junit.jupiter.api.Tag;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.testcontainers.containers.GenericContainer;
|
||||
import org.testcontainers.junit.jupiter.Testcontainers;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import java.net.URI;
|
||||
import java.util.Map;
|
||||
|
||||
@Testcontainers
|
||||
@Tag("slow")
|
||||
public class BrowserlessClientTest {
|
||||
static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome")).withExposedPorts(3000);
|
||||
static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome"))
|
||||
.withEnv(Map.of("TOKEN", "BROWSERLESS_TOKEN"))
|
||||
.withExposedPorts(3000);
|
||||
|
||||
@BeforeAll
|
||||
public static void setup() {
|
||||
|
@@ -1,30 +0,0 @@
|
||||
package nu.marginalia.rss.svc;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class TestXmlSanitization {
|
||||
|
||||
@Test
|
||||
public void testPreservedEntities() {
|
||||
Assertions.assertEquals("&", FeedFetcherService.sanitizeEntities("&"));
|
||||
Assertions.assertEquals("<", FeedFetcherService.sanitizeEntities("<"));
|
||||
Assertions.assertEquals(">", FeedFetcherService.sanitizeEntities(">"));
|
||||
Assertions.assertEquals("'", FeedFetcherService.sanitizeEntities("'"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStrayAmpersand() {
|
||||
Assertions.assertEquals("Bed & Breakfast", FeedFetcherService.sanitizeEntities("Bed & Breakfast"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTranslatedHtmlEntity() {
|
||||
Assertions.assertEquals("Foo -- Bar", FeedFetcherService.sanitizeEntities("Foo — Bar"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTranslatedHtmlEntityQuot() {
|
||||
Assertions.assertEquals("\"Bob\"", FeedFetcherService.sanitizeEntities(""Bob""));
|
||||
}
|
||||
}
|
@@ -7,4 +7,8 @@ public record DictionaryResponse(String word, List<DictionaryEntry> entries) {
|
||||
this.word = word;
|
||||
this.entries = entries.stream().toList(); // Make an immutable copy
|
||||
}
|
||||
|
||||
public boolean hasEntries() {
|
||||
return !entries.isEmpty();
|
||||
}
|
||||
}
|
||||
|
@@ -2,9 +2,6 @@ package nu.marginalia.api.searchquery;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimitType;
|
||||
|
||||
@@ -27,37 +24,19 @@ public class IndexProtobufCodec {
|
||||
.build();
|
||||
}
|
||||
|
||||
public static QueryLimits convertQueryLimits(RpcQueryLimits queryLimits) {
|
||||
return new QueryLimits(
|
||||
queryLimits.getResultsByDomain(),
|
||||
queryLimits.getResultsTotal(),
|
||||
queryLimits.getTimeoutMs(),
|
||||
queryLimits.getFetchSize()
|
||||
);
|
||||
}
|
||||
|
||||
public static RpcQueryLimits convertQueryLimits(QueryLimits queryLimits) {
|
||||
return RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(queryLimits.resultsByDomain())
|
||||
.setResultsTotal(queryLimits.resultsTotal())
|
||||
.setTimeoutMs(queryLimits.timeoutMs())
|
||||
.setFetchSize(queryLimits.fetchSize())
|
||||
.build();
|
||||
}
|
||||
|
||||
public static SearchQuery convertRpcQuery(RpcQuery query) {
|
||||
List<SearchPhraseConstraint> phraeConstraints = new ArrayList<>();
|
||||
List<SearchPhraseConstraint> phraseConstraints = new ArrayList<>();
|
||||
|
||||
for (int j = 0; j < query.getPhrasesCount(); j++) {
|
||||
var coh = query.getPhrases(j);
|
||||
if (coh.getType() == RpcPhrases.TYPE.OPTIONAL) {
|
||||
phraeConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
|
||||
phraseConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
|
||||
}
|
||||
else if (coh.getType() == RpcPhrases.TYPE.MANDATORY) {
|
||||
phraeConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
|
||||
phraseConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
|
||||
}
|
||||
else if (coh.getType() == RpcPhrases.TYPE.FULL) {
|
||||
phraeConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
|
||||
phraseConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
|
||||
}
|
||||
else {
|
||||
throw new IllegalArgumentException("Unknown phrase constraint type: " + coh.getType());
|
||||
@@ -70,7 +49,7 @@ public class IndexProtobufCodec {
|
||||
query.getExcludeList(),
|
||||
query.getAdviceList(),
|
||||
query.getPriorityList(),
|
||||
phraeConstraints
|
||||
phraseConstraints
|
||||
);
|
||||
}
|
||||
|
||||
@@ -103,60 +82,4 @@ public class IndexProtobufCodec {
|
||||
return subqueryBuilder.build();
|
||||
}
|
||||
|
||||
public static ResultRankingParameters convertRankingParameterss(RpcResultRankingParameters params) {
|
||||
if (params == null)
|
||||
return ResultRankingParameters.sensibleDefaults();
|
||||
|
||||
return new ResultRankingParameters(
|
||||
new Bm25Parameters(params.getBm25K(), params.getBm25B()),
|
||||
params.getShortDocumentThreshold(),
|
||||
params.getShortDocumentPenalty(),
|
||||
params.getDomainRankBonus(),
|
||||
params.getQualityPenalty(),
|
||||
params.getShortSentenceThreshold(),
|
||||
params.getShortSentencePenalty(),
|
||||
params.getBm25Weight(),
|
||||
params.getTcfFirstPositionWeight(),
|
||||
params.getTcfVerbatimWeight(),
|
||||
params.getTcfProximityWeight(),
|
||||
ResultRankingParameters.TemporalBias.valueOf(params.getTemporalBias().getBias().name()),
|
||||
params.getTemporalBiasWeight(),
|
||||
params.getExportDebugData()
|
||||
);
|
||||
}
|
||||
|
||||
public static RpcResultRankingParameters convertRankingParameterss(ResultRankingParameters rankingParams,
|
||||
RpcTemporalBias temporalBias)
|
||||
{
|
||||
if (rankingParams == null) {
|
||||
rankingParams = ResultRankingParameters.sensibleDefaults();
|
||||
}
|
||||
|
||||
var builder = RpcResultRankingParameters.newBuilder()
|
||||
.setBm25B(rankingParams.bm25Params.b())
|
||||
.setBm25K(rankingParams.bm25Params.k())
|
||||
.setShortDocumentThreshold(rankingParams.shortDocumentThreshold)
|
||||
.setShortDocumentPenalty(rankingParams.shortDocumentPenalty)
|
||||
.setDomainRankBonus(rankingParams.domainRankBonus)
|
||||
.setQualityPenalty(rankingParams.qualityPenalty)
|
||||
.setShortSentenceThreshold(rankingParams.shortSentenceThreshold)
|
||||
.setShortSentencePenalty(rankingParams.shortSentencePenalty)
|
||||
.setBm25Weight(rankingParams.bm25Weight)
|
||||
.setTcfFirstPositionWeight(rankingParams.tcfFirstPosition)
|
||||
.setTcfProximityWeight(rankingParams.tcfProximity)
|
||||
.setTcfVerbatimWeight(rankingParams.tcfVerbatim)
|
||||
.setTemporalBiasWeight(rankingParams.temporalBiasWeight)
|
||||
.setExportDebugData(rankingParams.exportDebugData);
|
||||
|
||||
if (temporalBias != null && temporalBias.getBias() != RpcTemporalBias.Bias.NONE) {
|
||||
builder.setTemporalBias(temporalBias);
|
||||
}
|
||||
else {
|
||||
builder.setTemporalBias(RpcTemporalBias.newBuilder()
|
||||
.setBias(RpcTemporalBias.Bias.valueOf(rankingParams.temporalBias.name())));
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -5,7 +5,7 @@ import nu.marginalia.api.searchquery.model.query.QueryParams;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryResponse;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.SearchResultItem;
|
||||
import nu.marginalia.api.searchquery.model.results.SearchResultKeywordScore;
|
||||
import nu.marginalia.api.searchquery.model.results.debug.DebugFactor;
|
||||
@@ -37,7 +37,7 @@ public class QueryProtobufCodec {
|
||||
builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
|
||||
builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));
|
||||
|
||||
builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
|
||||
builder.setQueryLimits(query.specs.queryLimits);
|
||||
|
||||
// Query strategy may be overridden by the query, but if not, use the one from the request
|
||||
if (query.specs.queryStrategy != null && query.specs.queryStrategy != QueryStrategy.AUTO)
|
||||
@@ -45,9 +45,27 @@ public class QueryProtobufCodec {
|
||||
else
|
||||
builder.setQueryStrategy(request.getQueryStrategy());
|
||||
|
||||
if (request.getTemporalBias().getBias() != RpcTemporalBias.Bias.NONE) {
|
||||
if (query.specs.rankingParams != null) {
|
||||
builder.setParameters(IndexProtobufCodec.convertRankingParameterss(query.specs.rankingParams, request.getTemporalBias()));
|
||||
builder.setParameters(
|
||||
RpcResultRankingParameters.newBuilder(query.specs.rankingParams)
|
||||
.setTemporalBias(request.getTemporalBias())
|
||||
.build()
|
||||
);
|
||||
} else {
|
||||
builder.setParameters(
|
||||
RpcResultRankingParameters.newBuilder(PrototypeRankingParameters.sensibleDefaults())
|
||||
.setTemporalBias(request.getTemporalBias())
|
||||
.build()
|
||||
);
|
||||
}
|
||||
} else if (query.specs.rankingParams != null) {
|
||||
builder.setParameters(query.specs.rankingParams);
|
||||
}
|
||||
// else {
|
||||
// if we have no ranking params, we don't need to set them, the client check and use the default values
|
||||
// so we don't need to send this huge object over the wire
|
||||
// }
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
@@ -65,18 +83,13 @@ public class QueryProtobufCodec {
|
||||
builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
|
||||
builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));
|
||||
|
||||
builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
|
||||
builder.setQueryLimits(query.specs.queryLimits);
|
||||
|
||||
// Query strategy may be overridden by the query, but if not, use the one from the request
|
||||
builder.setQueryStrategy(query.specs.queryStrategy.name());
|
||||
|
||||
if (query.specs.rankingParams != null) {
|
||||
builder.setParameters(IndexProtobufCodec.convertRankingParameterss(
|
||||
query.specs.rankingParams,
|
||||
RpcTemporalBias.newBuilder().setBias(
|
||||
RpcTemporalBias.Bias.NONE)
|
||||
.build())
|
||||
);
|
||||
builder.setParameters(query.specs.rankingParams);
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
@@ -95,10 +108,10 @@ public class QueryProtobufCodec {
|
||||
IndexProtobufCodec.convertSpecLimit(request.getSize()),
|
||||
IndexProtobufCodec.convertSpecLimit(request.getRank()),
|
||||
request.getDomainIdsList(),
|
||||
IndexProtobufCodec.convertQueryLimits(request.getQueryLimits()),
|
||||
request.getQueryLimits(),
|
||||
request.getSearchSetIdentifier(),
|
||||
QueryStrategy.valueOf(request.getQueryStrategy()),
|
||||
ResultRankingParameters.TemporalBias.valueOf(request.getTemporalBias().getBias().name()),
|
||||
RpcTemporalBias.Bias.valueOf(request.getTemporalBias().getBias().name()),
|
||||
request.getPagination().getPage()
|
||||
);
|
||||
}
|
||||
@@ -294,9 +307,9 @@ public class QueryProtobufCodec {
|
||||
IndexProtobufCodec.convertSpecLimit(specs.getYear()),
|
||||
IndexProtobufCodec.convertSpecLimit(specs.getSize()),
|
||||
IndexProtobufCodec.convertSpecLimit(specs.getRank()),
|
||||
IndexProtobufCodec.convertQueryLimits(specs.getQueryLimits()),
|
||||
specs.getQueryLimits(),
|
||||
QueryStrategy.valueOf(specs.getQueryStrategy()),
|
||||
IndexProtobufCodec.convertRankingParameterss(specs.getParameters())
|
||||
specs.hasParameters() ? specs.getParameters() : null
|
||||
);
|
||||
}
|
||||
|
||||
@@ -307,7 +320,7 @@ public class QueryProtobufCodec {
|
||||
.addAllTacitExcludes(params.tacitExcludes())
|
||||
.addAllTacitPriority(params.tacitPriority())
|
||||
.setHumanQuery(params.humanQuery())
|
||||
.setQueryLimits(IndexProtobufCodec.convertQueryLimits(params.limits()))
|
||||
.setQueryLimits(params.limits())
|
||||
.setQuality(IndexProtobufCodec.convertSpecLimit(params.quality()))
|
||||
.setYear(IndexProtobufCodec.convertSpecLimit(params.year()))
|
||||
.setSize(IndexProtobufCodec.convertSpecLimit(params.size()))
|
||||
@@ -319,7 +332,7 @@ public class QueryProtobufCodec {
|
||||
.build())
|
||||
.setPagination(RpcQsQueryPagination.newBuilder()
|
||||
.setPage(params.page())
|
||||
.setPageSize(Math.min(100, params.limits().resultsTotal()))
|
||||
.setPageSize(Math.min(100, params.limits().getResultsTotal()))
|
||||
.build());
|
||||
|
||||
if (params.nearDomain() != null)
|
||||
|
@@ -1,7 +1,7 @@
|
||||
package nu.marginalia.api.searchquery.model.query;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
|
||||
@@ -21,14 +21,14 @@ public record QueryParams(
|
||||
SpecificationLimit size,
|
||||
SpecificationLimit rank,
|
||||
List<Integer> domainIds,
|
||||
QueryLimits limits,
|
||||
RpcQueryLimits limits,
|
||||
String identifier,
|
||||
QueryStrategy queryStrategy,
|
||||
ResultRankingParameters.TemporalBias temporalBias,
|
||||
RpcTemporalBias.Bias temporalBias,
|
||||
int page
|
||||
)
|
||||
{
|
||||
public QueryParams(String query, QueryLimits limits, String identifier) {
|
||||
public QueryParams(String query, RpcQueryLimits limits, String identifier) {
|
||||
this(query, null,
|
||||
List.of(),
|
||||
List.of(),
|
||||
@@ -42,7 +42,7 @@ public record QueryParams(
|
||||
limits,
|
||||
identifier,
|
||||
QueryStrategy.AUTO,
|
||||
ResultRankingParameters.TemporalBias.NONE,
|
||||
RpcTemporalBias.Bias.NONE,
|
||||
1 // page
|
||||
);
|
||||
}
|
||||
|
@@ -1,10 +1,11 @@
|
||||
package nu.marginalia.api.searchquery.model.query;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.List;
|
||||
|
||||
public class SearchSpecification {
|
||||
@@ -24,11 +25,12 @@ public class SearchSpecification {
|
||||
public SpecificationLimit size;
|
||||
public SpecificationLimit rank;
|
||||
|
||||
public final QueryLimits queryLimits;
|
||||
public final RpcQueryLimits queryLimits;
|
||||
|
||||
public final QueryStrategy queryStrategy;
|
||||
|
||||
public final ResultRankingParameters rankingParams;
|
||||
@Nullable
|
||||
public final RpcResultRankingParameters rankingParams;
|
||||
|
||||
public SearchSpecification(SearchQuery query,
|
||||
List<Integer> domains,
|
||||
@@ -38,9 +40,9 @@ public class SearchSpecification {
|
||||
SpecificationLimit year,
|
||||
SpecificationLimit size,
|
||||
SpecificationLimit rank,
|
||||
QueryLimits queryLimits,
|
||||
RpcQueryLimits queryLimits,
|
||||
QueryStrategy queryStrategy,
|
||||
ResultRankingParameters rankingParams)
|
||||
@Nullable RpcResultRankingParameters rankingParams)
|
||||
{
|
||||
this.query = query;
|
||||
this.domains = domains;
|
||||
@@ -91,7 +93,7 @@ public class SearchSpecification {
|
||||
return this.rank;
|
||||
}
|
||||
|
||||
public QueryLimits getQueryLimits() {
|
||||
public RpcQueryLimits getQueryLimits() {
|
||||
return this.queryLimits;
|
||||
}
|
||||
|
||||
@@ -99,7 +101,7 @@ public class SearchSpecification {
|
||||
return this.queryStrategy;
|
||||
}
|
||||
|
||||
public ResultRankingParameters getRankingParams() {
|
||||
public RpcResultRankingParameters getRankingParams() {
|
||||
return this.rankingParams;
|
||||
}
|
||||
|
||||
@@ -120,9 +122,9 @@ public class SearchSpecification {
|
||||
private boolean size$set;
|
||||
private SpecificationLimit rank$value;
|
||||
private boolean rank$set;
|
||||
private QueryLimits queryLimits;
|
||||
private RpcQueryLimits queryLimits;
|
||||
private QueryStrategy queryStrategy;
|
||||
private ResultRankingParameters rankingParams;
|
||||
private RpcResultRankingParameters rankingParams;
|
||||
|
||||
SearchSpecificationBuilder() {
|
||||
}
|
||||
@@ -171,7 +173,7 @@ public class SearchSpecification {
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchSpecificationBuilder queryLimits(QueryLimits queryLimits) {
|
||||
public SearchSpecificationBuilder queryLimits(RpcQueryLimits queryLimits) {
|
||||
this.queryLimits = queryLimits;
|
||||
return this;
|
||||
}
|
||||
@@ -181,7 +183,7 @@ public class SearchSpecification {
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchSpecificationBuilder rankingParams(ResultRankingParameters rankingParams) {
|
||||
public SearchSpecificationBuilder rankingParams(RpcResultRankingParameters rankingParams) {
|
||||
this.rankingParams = rankingParams;
|
||||
return this;
|
||||
}
|
||||
|
@@ -0,0 +1,33 @@
|
||||
package nu.marginalia.api.searchquery.model.results;
|
||||
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
|
||||
public class PrototypeRankingParameters {
|
||||
|
||||
/** These are the default ranking parameters that are used when no parameters are specified. */
|
||||
|
||||
private static final RpcResultRankingParameters _sensibleDefaults = RpcResultRankingParameters.newBuilder()
|
||||
.setBm25B(0.5)
|
||||
.setBm25K(1.2)
|
||||
.setShortDocumentThreshold(2000)
|
||||
.setShortDocumentPenalty(2.)
|
||||
.setDomainRankBonus(1 / 100.)
|
||||
.setQualityPenalty(1 / 15.)
|
||||
.setShortSentenceThreshold(2)
|
||||
.setShortSentencePenalty(5)
|
||||
.setBm25Weight(1.)
|
||||
.setTcfVerbatimWeight(1.)
|
||||
.setTcfProximityWeight(1.)
|
||||
.setTcfFirstPositionWeight(5)
|
||||
.setTemporalBias(RpcTemporalBias.newBuilder().setBias(RpcTemporalBias.Bias.NONE))
|
||||
.setTemporalBiasWeight(5.0)
|
||||
.setExportDebugData(false)
|
||||
.setDisablePenalties(false)
|
||||
.build();
|
||||
|
||||
public static RpcResultRankingParameters sensibleDefaults() {
|
||||
return _sensibleDefaults;
|
||||
}
|
||||
|
||||
}
|
@@ -1,12 +1,13 @@
|
||||
package nu.marginalia.api.searchquery.model.results;
|
||||
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
|
||||
|
||||
import java.util.BitSet;
|
||||
|
||||
public class ResultRankingContext {
|
||||
private final int docCount;
|
||||
public final ResultRankingParameters params;
|
||||
public final RpcResultRankingParameters params;
|
||||
|
||||
|
||||
public final BitSet regularMask;
|
||||
@@ -21,7 +22,7 @@ public class ResultRankingContext {
|
||||
public final CqDataInt priorityCounts;
|
||||
|
||||
public ResultRankingContext(int docCount,
|
||||
ResultRankingParameters params,
|
||||
RpcResultRankingParameters params,
|
||||
BitSet ngramsMask,
|
||||
BitSet regularMask,
|
||||
CqDataInt fullCounts,
|
||||
|
@@ -1,278 +0,0 @@
|
||||
package nu.marginalia.api.searchquery.model.results;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public class ResultRankingParameters {
|
||||
|
||||
/**
|
||||
* Tuning for BM25 when applied to full document matches
|
||||
*/
|
||||
public final Bm25Parameters bm25Params;
|
||||
|
||||
/**
|
||||
* Documents below this length are penalized
|
||||
*/
|
||||
public int shortDocumentThreshold;
|
||||
|
||||
public double shortDocumentPenalty;
|
||||
|
||||
|
||||
/**
|
||||
* Scaling factor associated with domain rank (unscaled rank value is 0-255; high is good)
|
||||
*/
|
||||
public double domainRankBonus;
|
||||
|
||||
/**
|
||||
* Scaling factor associated with document quality (unscaled rank value is 0-15; high is bad)
|
||||
*/
|
||||
public double qualityPenalty;
|
||||
|
||||
/**
|
||||
* Average sentence length values below this threshold are penalized, range [0-4), 2 or 3 is probably what you want
|
||||
*/
|
||||
public int shortSentenceThreshold;
|
||||
|
||||
/**
|
||||
* Magnitude of penalty for documents with low average sentence length
|
||||
*/
|
||||
public double shortSentencePenalty;
|
||||
|
||||
public double bm25Weight;
|
||||
public double tcfFirstPosition;
|
||||
public double tcfVerbatim;
|
||||
public double tcfProximity;
|
||||
|
||||
public TemporalBias temporalBias;
|
||||
public double temporalBiasWeight;
|
||||
|
||||
public boolean exportDebugData;
|
||||
|
||||
public ResultRankingParameters(Bm25Parameters bm25Params, int shortDocumentThreshold, double shortDocumentPenalty, double domainRankBonus, double qualityPenalty, int shortSentenceThreshold, double shortSentencePenalty, double bm25Weight, double tcfFirstPosition, double tcfVerbatim, double tcfProximity, TemporalBias temporalBias, double temporalBiasWeight, boolean exportDebugData) {
|
||||
this.bm25Params = bm25Params;
|
||||
this.shortDocumentThreshold = shortDocumentThreshold;
|
||||
this.shortDocumentPenalty = shortDocumentPenalty;
|
||||
this.domainRankBonus = domainRankBonus;
|
||||
this.qualityPenalty = qualityPenalty;
|
||||
this.shortSentenceThreshold = shortSentenceThreshold;
|
||||
this.shortSentencePenalty = shortSentencePenalty;
|
||||
this.bm25Weight = bm25Weight;
|
||||
this.tcfFirstPosition = tcfFirstPosition;
|
||||
this.tcfVerbatim = tcfVerbatim;
|
||||
this.tcfProximity = tcfProximity;
|
||||
this.temporalBias = temporalBias;
|
||||
this.temporalBiasWeight = temporalBiasWeight;
|
||||
this.exportDebugData = exportDebugData;
|
||||
}
|
||||
|
||||
public static ResultRankingParameters sensibleDefaults() {
|
||||
return builder()
|
||||
.bm25Params(new Bm25Parameters(1.2, 0.5))
|
||||
.shortDocumentThreshold(2000)
|
||||
.shortDocumentPenalty(2.)
|
||||
.domainRankBonus(1 / 100.)
|
||||
.qualityPenalty(1 / 15.)
|
||||
.shortSentenceThreshold(2)
|
||||
.shortSentencePenalty(5)
|
||||
.bm25Weight(1.)
|
||||
.tcfVerbatim(1.)
|
||||
.tcfProximity(1.)
|
||||
.tcfFirstPosition(5)
|
||||
.temporalBias(TemporalBias.NONE)
|
||||
.temporalBiasWeight(5.0)
|
||||
.exportDebugData(false)
|
||||
.build();
|
||||
}
|
||||
|
||||
public static ResultRankingParametersBuilder builder() {
|
||||
return new ResultRankingParametersBuilder();
|
||||
}
|
||||
|
||||
public Bm25Parameters getBm25Params() {
|
||||
return this.bm25Params;
|
||||
}
|
||||
|
||||
public int getShortDocumentThreshold() {
|
||||
return this.shortDocumentThreshold;
|
||||
}
|
||||
|
||||
public double getShortDocumentPenalty() {
|
||||
return this.shortDocumentPenalty;
|
||||
}
|
||||
|
||||
public double getDomainRankBonus() {
|
||||
return this.domainRankBonus;
|
||||
}
|
||||
|
||||
public double getQualityPenalty() {
|
||||
return this.qualityPenalty;
|
||||
}
|
||||
|
||||
public int getShortSentenceThreshold() {
|
||||
return this.shortSentenceThreshold;
|
||||
}
|
||||
|
||||
public double getShortSentencePenalty() {
|
||||
return this.shortSentencePenalty;
|
||||
}
|
||||
|
||||
public double getBm25Weight() {
|
||||
return this.bm25Weight;
|
||||
}
|
||||
|
||||
public double getTcfFirstPosition() {
|
||||
return this.tcfFirstPosition;
|
||||
}
|
||||
|
||||
public double getTcfVerbatim() {
|
||||
return this.tcfVerbatim;
|
||||
}
|
||||
|
||||
public double getTcfProximity() {
|
||||
return this.tcfProximity;
|
||||
}
|
||||
|
||||
public TemporalBias getTemporalBias() {
|
||||
return this.temporalBias;
|
||||
}
|
||||
|
||||
public double getTemporalBiasWeight() {
|
||||
return this.temporalBiasWeight;
|
||||
}
|
||||
|
||||
public boolean isExportDebugData() {
|
||||
return this.exportDebugData;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof ResultRankingParameters that)) return false;
|
||||
|
||||
return shortDocumentThreshold == that.shortDocumentThreshold && Double.compare(shortDocumentPenalty, that.shortDocumentPenalty) == 0 && Double.compare(domainRankBonus, that.domainRankBonus) == 0 && Double.compare(qualityPenalty, that.qualityPenalty) == 0 && shortSentenceThreshold == that.shortSentenceThreshold && Double.compare(shortSentencePenalty, that.shortSentencePenalty) == 0 && Double.compare(bm25Weight, that.bm25Weight) == 0 && Double.compare(tcfFirstPosition, that.tcfFirstPosition) == 0 && Double.compare(tcfVerbatim, that.tcfVerbatim) == 0 && Double.compare(tcfProximity, that.tcfProximity) == 0 && Double.compare(temporalBiasWeight, that.temporalBiasWeight) == 0 && exportDebugData == that.exportDebugData && Objects.equals(bm25Params, that.bm25Params) && temporalBias == that.temporalBias;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = Objects.hashCode(bm25Params);
|
||||
result = 31 * result + shortDocumentThreshold;
|
||||
result = 31 * result + Double.hashCode(shortDocumentPenalty);
|
||||
result = 31 * result + Double.hashCode(domainRankBonus);
|
||||
result = 31 * result + Double.hashCode(qualityPenalty);
|
||||
result = 31 * result + shortSentenceThreshold;
|
||||
result = 31 * result + Double.hashCode(shortSentencePenalty);
|
||||
result = 31 * result + Double.hashCode(bm25Weight);
|
||||
result = 31 * result + Double.hashCode(tcfFirstPosition);
|
||||
result = 31 * result + Double.hashCode(tcfVerbatim);
|
||||
result = 31 * result + Double.hashCode(tcfProximity);
|
||||
result = 31 * result + Objects.hashCode(temporalBias);
|
||||
result = 31 * result + Double.hashCode(temporalBiasWeight);
|
||||
result = 31 * result + Boolean.hashCode(exportDebugData);
|
||||
return result;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "ResultRankingParameters(bm25Params=" + this.getBm25Params() + ", shortDocumentThreshold=" + this.getShortDocumentThreshold() + ", shortDocumentPenalty=" + this.getShortDocumentPenalty() + ", domainRankBonus=" + this.getDomainRankBonus() + ", qualityPenalty=" + this.getQualityPenalty() + ", shortSentenceThreshold=" + this.getShortSentenceThreshold() + ", shortSentencePenalty=" + this.getShortSentencePenalty() + ", bm25Weight=" + this.getBm25Weight() + ", tcfFirstPosition=" + this.getTcfFirstPosition() + ", tcfVerbatim=" + this.getTcfVerbatim() + ", tcfProximity=" + this.getTcfProximity() + ", temporalBias=" + this.getTemporalBias() + ", temporalBiasWeight=" + this.getTemporalBiasWeight() + ", exportDebugData=" + this.isExportDebugData() + ")";
|
||||
}
|
||||
|
||||
public enum TemporalBias {
|
||||
RECENT, OLD, NONE
|
||||
}
|
||||
|
||||
public static class ResultRankingParametersBuilder {
|
||||
private Bm25Parameters bm25Params;
|
||||
private int shortDocumentThreshold;
|
||||
private double shortDocumentPenalty;
|
||||
private double domainRankBonus;
|
||||
private double qualityPenalty;
|
||||
private int shortSentenceThreshold;
|
||||
private double shortSentencePenalty;
|
||||
private double bm25Weight;
|
||||
private double tcfFirstPosition;
|
||||
private double tcfVerbatim;
|
||||
private double tcfProximity;
|
||||
private TemporalBias temporalBias;
|
||||
private double temporalBiasWeight;
|
||||
private boolean exportDebugData;
|
||||
|
||||
ResultRankingParametersBuilder() {
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder bm25Params(Bm25Parameters bm25Params) {
|
||||
this.bm25Params = bm25Params;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder shortDocumentThreshold(int shortDocumentThreshold) {
|
||||
this.shortDocumentThreshold = shortDocumentThreshold;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder shortDocumentPenalty(double shortDocumentPenalty) {
|
||||
this.shortDocumentPenalty = shortDocumentPenalty;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder domainRankBonus(double domainRankBonus) {
|
||||
this.domainRankBonus = domainRankBonus;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder qualityPenalty(double qualityPenalty) {
|
||||
this.qualityPenalty = qualityPenalty;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder shortSentenceThreshold(int shortSentenceThreshold) {
|
||||
this.shortSentenceThreshold = shortSentenceThreshold;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder shortSentencePenalty(double shortSentencePenalty) {
|
||||
this.shortSentencePenalty = shortSentencePenalty;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder bm25Weight(double bm25Weight) {
|
||||
this.bm25Weight = bm25Weight;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder tcfFirstPosition(double tcfFirstPosition) {
|
||||
this.tcfFirstPosition = tcfFirstPosition;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder tcfVerbatim(double tcfVerbatim) {
|
||||
this.tcfVerbatim = tcfVerbatim;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder tcfProximity(double tcfProximity) {
|
||||
this.tcfProximity = tcfProximity;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder temporalBias(TemporalBias temporalBias) {
|
||||
this.temporalBias = temporalBias;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder temporalBiasWeight(double temporalBiasWeight) {
|
||||
this.temporalBiasWeight = temporalBiasWeight;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParametersBuilder exportDebugData(boolean exportDebugData) {
|
||||
this.exportDebugData = exportDebugData;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ResultRankingParameters build() {
|
||||
return new ResultRankingParameters(this.bm25Params, this.shortDocumentThreshold, this.shortDocumentPenalty, this.domainRankBonus, this.qualityPenalty, this.shortSentenceThreshold, this.shortSentencePenalty, this.bm25Weight, this.tcfFirstPosition, this.tcfVerbatim, this.tcfProximity, this.temporalBias, this.temporalBiasWeight, this.exportDebugData);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "ResultRankingParameters.ResultRankingParametersBuilder(bm25Params=" + this.bm25Params + ", shortDocumentThreshold=" + this.shortDocumentThreshold + ", shortDocumentPenalty=" + this.shortDocumentPenalty + ", domainRankBonus=" + this.domainRankBonus + ", qualityPenalty=" + this.qualityPenalty + ", shortSentenceThreshold=" + this.shortSentenceThreshold + ", shortSentencePenalty=" + this.shortSentencePenalty + ", bm25Weight=" + this.bm25Weight + ", tcfFirstPosition=" + this.tcfFirstPosition + ", tcfVerbatim=" + this.tcfVerbatim + ", tcfProximity=" + this.tcfProximity + ", temporalBias=" + this.temporalBias + ", temporalBiasWeight=" + this.temporalBiasWeight + ", exportDebugData=" + this.exportDebugData + ")";
|
||||
}
|
||||
}
|
||||
}
|
@@ -162,6 +162,7 @@ message RpcResultRankingParameters {
|
||||
double temporalBiasWeight = 17;
|
||||
|
||||
bool exportDebugData = 18;
|
||||
bool disablePenalties = 19;
|
||||
|
||||
}
|
||||
|
||||
|
@@ -3,8 +3,6 @@ package nu.marginalia.index.client;
|
||||
import nu.marginalia.api.searchquery.IndexProtobufCodec;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
@@ -22,18 +20,6 @@ class IndexProtobufCodecTest {
|
||||
verifyIsIdentityTransformation(SpecificationLimit.lessThan(1), l -> IndexProtobufCodec.convertSpecLimit(IndexProtobufCodec.convertSpecLimit(l)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRankingParameters() {
|
||||
verifyIsIdentityTransformation(ResultRankingParameters.sensibleDefaults(),
|
||||
p -> IndexProtobufCodec.convertRankingParameterss(IndexProtobufCodec.convertRankingParameterss(p, null)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryLimits() {
|
||||
verifyIsIdentityTransformation(new QueryLimits(1,2,3,4),
|
||||
l -> IndexProtobufCodec.convertQueryLimits(IndexProtobufCodec.convertQueryLimits(l))
|
||||
);
|
||||
}
|
||||
@Test
|
||||
public void testSubqery() {
|
||||
verifyIsIdentityTransformation(new SearchQuery(
|
||||
|
@@ -2,8 +2,9 @@ package nu.marginalia.functions.searchquery;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.query.*;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
|
||||
import nu.marginalia.functions.searchquery.query_parser.QueryParser;
|
||||
import nu.marginalia.functions.searchquery.query_parser.token.QueryToken;
|
||||
@@ -36,7 +37,7 @@ public class QueryFactory {
|
||||
|
||||
|
||||
public ProcessedQuery createQuery(QueryParams params,
|
||||
@Nullable ResultRankingParameters rankingParams) {
|
||||
@Nullable RpcResultRankingParameters rankingParams) {
|
||||
final var query = params.humanQuery();
|
||||
|
||||
if (query.length() > 1000) {
|
||||
@@ -71,6 +72,17 @@ public class QueryFactory {
|
||||
|
||||
String[] parts = StringUtils.split(str, '_');
|
||||
|
||||
// Trim down tokens to match the behavior of the tokenizer used in indexing
|
||||
for (int i = 0; i < parts.length; i++) {
|
||||
String part = parts[i];
|
||||
|
||||
if (part.endsWith("'s") && part.length() > 2) {
|
||||
part = part.substring(0, part.length()-2);
|
||||
}
|
||||
|
||||
parts[i] = part;
|
||||
}
|
||||
|
||||
if (parts.length > 1) {
|
||||
// Require that the terms appear in sequence
|
||||
queryBuilder.phraseConstraint(SearchPhraseConstraint.mandatory(parts));
|
||||
@@ -121,7 +133,9 @@ public class QueryFactory {
|
||||
var limits = params.limits();
|
||||
// Disable limits on number of results per domain if we're searching with a site:-type term
|
||||
if (domain != null) {
|
||||
limits = limits.forSingleDomain();
|
||||
limits = RpcQueryLimits.newBuilder(limits)
|
||||
.setResultsByDomain(limits.getResultsTotal())
|
||||
.build();
|
||||
}
|
||||
|
||||
var expansion = queryExpansion.expandQuery(queryBuilder.searchTermsInclude);
|
||||
|
@@ -9,7 +9,7 @@ import nu.marginalia.api.searchquery.*;
|
||||
import nu.marginalia.api.searchquery.model.query.ProcessedQuery;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryParams;
|
||||
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
|
||||
import nu.marginalia.index.api.IndexClient;
|
||||
import nu.marginalia.service.server.DiscoverableService;
|
||||
import org.slf4j.Logger;
|
||||
@@ -55,7 +55,7 @@ public class QueryGRPCService
|
||||
.time(() -> {
|
||||
|
||||
var params = QueryProtobufCodec.convertRequest(request);
|
||||
var query = queryFactory.createQuery(params, ResultRankingParameters.sensibleDefaults());
|
||||
var query = queryFactory.createQuery(params, PrototypeRankingParameters.sensibleDefaults());
|
||||
|
||||
var indexRequest = QueryProtobufCodec.convertQuery(request, query);
|
||||
|
||||
@@ -102,7 +102,7 @@ public class QueryGRPCService
|
||||
String originalQuery,
|
||||
QueryParams params,
|
||||
IndexClient.Pagination pagination,
|
||||
ResultRankingParameters rankingParameters) {
|
||||
RpcResultRankingParameters rankingParameters) {
|
||||
|
||||
var query = queryFactory.createQuery(params, rankingParameters);
|
||||
IndexClient.AggregateQueryResponse response = indexClient.executeQueries(QueryProtobufCodec.convertQuery(originalQuery, query), pagination);
|
||||
|
@@ -233,9 +233,19 @@ public class QueryParser {
|
||||
entity.replace(new QueryToken.RankTerm(limit, str));
|
||||
} else if (str.startsWith("qs=")) {
|
||||
entity.replace(new QueryToken.QsTerm(str.substring(3)));
|
||||
} else if (str.contains(":")) {
|
||||
} else if (str.startsWith("site:")
|
||||
|| str.startsWith("format:")
|
||||
|| str.startsWith("file:")
|
||||
|| str.startsWith("tld:")
|
||||
|| str.startsWith("ip:")
|
||||
|| str.startsWith("as:")
|
||||
|| str.startsWith("asn:")
|
||||
|| str.startsWith("generator:")
|
||||
)
|
||||
{
|
||||
entity.replace(new QueryToken.AdviceTerm(str, t.displayStr()));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static SpecificationLimit parseSpecificationLimit(String str) {
|
||||
|
@@ -1,12 +1,12 @@
|
||||
package nu.marginalia.query.svc;
|
||||
|
||||
import nu.marginalia.WmsaHome;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryParams;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.functions.searchquery.QueryFactory;
|
||||
import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimitType;
|
||||
@@ -49,10 +49,15 @@ public class QueryFactoryTest {
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
null,
|
||||
new QueryLimits(100, 100, 100, 100),
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsTotal(100)
|
||||
.setResultsByDomain(100)
|
||||
.setTimeoutMs(100)
|
||||
.setFetchSize(100)
|
||||
.build(),
|
||||
"NONE",
|
||||
QueryStrategy.AUTO,
|
||||
ResultRankingParameters.TemporalBias.NONE,
|
||||
RpcTemporalBias.Bias.NONE,
|
||||
0), null).specs;
|
||||
}
|
||||
|
||||
@@ -208,6 +213,23 @@ public class QueryFactoryTest {
|
||||
System.out.println(subquery);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCplusPlus() {
|
||||
var subquery = parseAndGetSpecs("std::vector::push_back vector");
|
||||
System.out.println(subquery);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQuotedApostrophe() {
|
||||
var subquery = parseAndGetSpecs("\"bob's cars\"");
|
||||
|
||||
System.out.println(subquery);
|
||||
|
||||
Assertions.assertTrue(subquery.query.compiledQuery.contains(" bob "));
|
||||
Assertions.assertFalse(subquery.query.compiledQuery.contains(" bob's "));
|
||||
Assertions.assertEquals("\"bob's cars\"", subquery.humanQuery);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpansion9() {
|
||||
var subquery = parseAndGetSpecs("pie recipe");
|
||||
|
@@ -10,12 +10,12 @@ import it.unimi.dsi.fastutil.longs.LongArrayList;
|
||||
import nu.marginalia.api.searchquery.IndexApiGrpc;
|
||||
import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
|
||||
import nu.marginalia.api.searchquery.RpcIndexQuery;
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.array.page.LongQueryBuffer;
|
||||
import nu.marginalia.index.index.StatefulIndex;
|
||||
import nu.marginalia.index.model.SearchParameters;
|
||||
@@ -211,7 +211,7 @@ public class IndexGrpcService
|
||||
/** This class is responsible for ranking the results and adding the best results to the
|
||||
* resultHeap, which depending on the state of the indexLookup threads may or may not block
|
||||
*/
|
||||
private ResultRankingContext createRankingContext(ResultRankingParameters rankingParams,
|
||||
private ResultRankingContext createRankingContext(RpcResultRankingParameters rankingParams,
|
||||
CompiledQuery<String> compiledQuery,
|
||||
CompiledQueryLong compiledQueryIds)
|
||||
{
|
||||
|
@@ -2,12 +2,13 @@ package nu.marginalia.index.model;
|
||||
|
||||
import nu.marginalia.api.searchquery.IndexProtobufCodec;
|
||||
import nu.marginalia.api.searchquery.RpcIndexQuery;
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryParser;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
|
||||
import nu.marginalia.index.query.IndexSearchBudget;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.searchset.SearchSet;
|
||||
@@ -23,7 +24,7 @@ public class SearchParameters {
|
||||
public final IndexSearchBudget budget;
|
||||
public final SearchQuery query;
|
||||
public final QueryParams queryParams;
|
||||
public final ResultRankingParameters rankingParams;
|
||||
public final RpcResultRankingParameters rankingParams;
|
||||
|
||||
public final int limitByDomain;
|
||||
public final int limitTotal;
|
||||
@@ -41,11 +42,11 @@ public class SearchParameters {
|
||||
public SearchParameters(SearchSpecification specsSet, SearchSet searchSet) {
|
||||
var limits = specsSet.queryLimits;
|
||||
|
||||
this.fetchSize = limits.fetchSize();
|
||||
this.budget = new IndexSearchBudget(limits.timeoutMs());
|
||||
this.fetchSize = limits.getFetchSize();
|
||||
this.budget = new IndexSearchBudget(limits.getTimeoutMs());
|
||||
this.query = specsSet.query;
|
||||
this.limitByDomain = limits.resultsByDomain();
|
||||
this.limitTotal = limits.resultsTotal();
|
||||
this.limitByDomain = limits.getResultsByDomain();
|
||||
this.limitTotal = limits.getResultsTotal();
|
||||
|
||||
queryParams = new QueryParams(
|
||||
specsSet.quality,
|
||||
@@ -62,17 +63,17 @@ public class SearchParameters {
|
||||
}
|
||||
|
||||
public SearchParameters(RpcIndexQuery request, SearchSet searchSet) {
|
||||
var limits = IndexProtobufCodec.convertQueryLimits(request.getQueryLimits());
|
||||
var limits = request.getQueryLimits();
|
||||
|
||||
this.fetchSize = limits.fetchSize();
|
||||
this.fetchSize = limits.getFetchSize();
|
||||
|
||||
// The time budget is halved because this is the point when we start to
|
||||
// wrap up the search and return the results.
|
||||
this.budget = new IndexSearchBudget(limits.timeoutMs() / 2);
|
||||
this.budget = new IndexSearchBudget(limits.getTimeoutMs() / 2);
|
||||
this.query = IndexProtobufCodec.convertRpcQuery(request.getQuery());
|
||||
|
||||
this.limitByDomain = limits.resultsByDomain();
|
||||
this.limitTotal = limits.resultsTotal();
|
||||
this.limitByDomain = limits.getResultsByDomain();
|
||||
this.limitTotal = limits.getResultsTotal();
|
||||
|
||||
queryParams = new QueryParams(
|
||||
convertSpecLimit(request.getQuality()),
|
||||
@@ -85,7 +86,7 @@ public class SearchParameters {
|
||||
compiledQuery = CompiledQueryParser.parse(this.query.compiledQuery);
|
||||
compiledQueryIds = compiledQuery.mapToLong(SearchTermsUtil::getWordId);
|
||||
|
||||
rankingParams = IndexProtobufCodec.convertRankingParameterss(request.getParameters());
|
||||
rankingParams = request.hasParameters() ? request.getParameters() : PrototypeRankingParameters.sensibleDefaults();
|
||||
}
|
||||
|
||||
|
||||
|
@@ -2,7 +2,6 @@ package nu.marginalia.index.results;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqExpression;
|
||||
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
|
||||
|
||||
import java.util.BitSet;
|
||||
@@ -24,14 +23,14 @@ public class Bm25GraphVisitor implements CqExpression.DoubleVisitor {
|
||||
|
||||
private final BitSet mask;
|
||||
|
||||
public Bm25GraphVisitor(Bm25Parameters bm25Parameters,
|
||||
public Bm25GraphVisitor(double k1, double b,
|
||||
float[] counts,
|
||||
int length,
|
||||
ResultRankingContext ctx) {
|
||||
this.length = length;
|
||||
|
||||
this.k1 = bm25Parameters.k();
|
||||
this.b = bm25Parameters.b();
|
||||
this.k1 = k1;
|
||||
this.b = b;
|
||||
|
||||
this.docCount = ctx.termFreqDocCount();
|
||||
this.counts = counts;
|
||||
|
@@ -156,7 +156,7 @@ public class IndexResultRankingService {
|
||||
// for the selected results, as this would be comically expensive to do for all the results we
|
||||
// discard along the way
|
||||
|
||||
if (params.rankingParams.exportDebugData) {
|
||||
if (params.rankingParams.getExportDebugData()) {
|
||||
var combinedIdsList = new LongArrayList(resultsList.size());
|
||||
for (var item : resultsList) {
|
||||
combinedIdsList.add(item.combinedId);
|
||||
|
@@ -2,10 +2,11 @@ package nu.marginalia.index.results;
|
||||
|
||||
import it.unimi.dsi.fastutil.ints.IntIterator;
|
||||
import it.unimi.dsi.fastutil.ints.IntList;
|
||||
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.SearchResultItem;
|
||||
import nu.marginalia.api.searchquery.model.results.debug.DebugRankingFactors;
|
||||
import nu.marginalia.index.forward.spans.DocumentSpans;
|
||||
@@ -116,14 +117,14 @@ public class IndexResultScoreCalculator {
|
||||
|
||||
float proximitiyFac = getProximitiyFac(decodedPositions, searchTerms.phraseConstraints, verbatimMatches, unorderedMatches, spans);
|
||||
|
||||
double score_firstPosition = params.tcfFirstPosition * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
|
||||
double score_verbatim = params.tcfVerbatim * verbatimMatches.getScore();
|
||||
double score_proximity = params.tcfProximity * proximitiyFac;
|
||||
double score_bM25 = params.bm25Weight
|
||||
* wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.bm25Params, unorderedMatches.getWeightedCounts(), docSize, rankingContext))
|
||||
double score_firstPosition = params.getTcfFirstPositionWeight() * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
|
||||
double score_verbatim = params.getTcfVerbatimWeight() * verbatimMatches.getScore();
|
||||
double score_proximity = params.getTcfProximityWeight() * proximitiyFac;
|
||||
double score_bM25 = params.getBm25Weight()
|
||||
* wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.getBm25K(), params.getBm25B(), unorderedMatches.getWeightedCounts(), docSize, rankingContext))
|
||||
/ (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));
|
||||
double score_bFlags = params.bm25Weight
|
||||
* wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.bm25Params, wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
|
||||
double score_bFlags = params.getBm25Weight()
|
||||
* wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.getBm25K(), wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
|
||||
/ (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));
|
||||
|
||||
double score = normalize(
|
||||
@@ -245,9 +246,13 @@ public class IndexResultScoreCalculator {
|
||||
private double calculateDocumentBonus(long documentMetadata,
|
||||
int features,
|
||||
int length,
|
||||
ResultRankingParameters rankingParams,
|
||||
RpcResultRankingParameters rankingParams,
|
||||
@Nullable DebugRankingFactors debugRankingFactors) {
|
||||
|
||||
if (rankingParams.getDisablePenalties()) {
|
||||
return 0.;
|
||||
}
|
||||
|
||||
int rank = DocumentMetadata.decodeRank(documentMetadata);
|
||||
int asl = DocumentMetadata.decodeAvgSentenceLength(documentMetadata);
|
||||
int quality = DocumentMetadata.decodeQuality(documentMetadata);
|
||||
@@ -256,18 +261,18 @@ public class IndexResultScoreCalculator {
|
||||
int topology = DocumentMetadata.decodeTopology(documentMetadata);
|
||||
int year = DocumentMetadata.decodeYear(documentMetadata);
|
||||
|
||||
double averageSentenceLengthPenalty = (asl >= rankingParams.shortSentenceThreshold ? 0 : -rankingParams.shortSentencePenalty);
|
||||
double averageSentenceLengthPenalty = (asl >= rankingParams.getShortSentenceThreshold() ? 0 : -rankingParams.getShortSentencePenalty());
|
||||
|
||||
final double qualityPenalty = calculateQualityPenalty(size, quality, rankingParams);
|
||||
final double rankingBonus = (255. - rank) * rankingParams.domainRankBonus;
|
||||
final double rankingBonus = (255. - rank) * rankingParams.getDomainRankBonus();
|
||||
final double topologyBonus = Math.log(1 + topology);
|
||||
final double documentLengthPenalty = length > rankingParams.shortDocumentThreshold ? 0 : -rankingParams.shortDocumentPenalty;
|
||||
final double documentLengthPenalty = length > rankingParams.getShortDocumentThreshold() ? 0 : -rankingParams.getShortDocumentPenalty();
|
||||
final double temporalBias;
|
||||
|
||||
if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.RECENT) {
|
||||
temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.temporalBiasWeight;
|
||||
} else if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.OLD) {
|
||||
temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.temporalBiasWeight;
|
||||
if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.RECENT) {
|
||||
temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.getTemporalBiasWeight();
|
||||
} else if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.OLD) {
|
||||
temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.getTemporalBiasWeight();
|
||||
} else {
|
||||
temporalBias = 0;
|
||||
}
|
||||
@@ -506,14 +511,14 @@ public class IndexResultScoreCalculator {
|
||||
}
|
||||
|
||||
|
||||
private double calculateQualityPenalty(int size, int quality, ResultRankingParameters rankingParams) {
|
||||
private double calculateQualityPenalty(int size, int quality, RpcResultRankingParameters rankingParams) {
|
||||
if (size < 400) {
|
||||
if (quality < 5)
|
||||
return 0;
|
||||
return -quality * rankingParams.qualityPenalty;
|
||||
return -quality * rankingParams.getQualityPenalty();
|
||||
}
|
||||
else {
|
||||
return -quality * rankingParams.qualityPenalty * 20;
|
||||
return -quality * rankingParams.getQualityPenalty() * 20;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -3,7 +3,6 @@ package nu.marginalia.index.results;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqDataLong;
|
||||
import nu.marginalia.api.searchquery.model.compiled.CqExpression;
|
||||
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
|
||||
import nu.marginalia.model.idx.WordFlags;
|
||||
|
||||
@@ -15,15 +14,14 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
|
||||
private final CqDataLong wordMetaData;
|
||||
private final CqDataInt frequencies;
|
||||
private final float[] counts;
|
||||
private final Bm25Parameters bm25Parameters;
|
||||
|
||||
private final double k1;
|
||||
private final int docCount;
|
||||
|
||||
public TermFlagsGraphVisitor(Bm25Parameters bm25Parameters,
|
||||
public TermFlagsGraphVisitor(double k1,
|
||||
CqDataLong wordMetaData,
|
||||
float[] counts,
|
||||
ResultRankingContext ctx) {
|
||||
this.bm25Parameters = bm25Parameters;
|
||||
this.k1 = k1;
|
||||
this.counts = counts;
|
||||
this.docCount = ctx.termFreqDocCount();
|
||||
this.wordMetaData = wordMetaData;
|
||||
@@ -55,7 +53,7 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
|
||||
int freq = frequencies.get(idx);
|
||||
|
||||
// note we override b to zero for priority terms as they are independent of document length
|
||||
return invFreq(docCount, freq) * f(bm25Parameters.k(), 0, count, 0);
|
||||
return invFreq(docCount, freq) * f(k1, 0, count, 0);
|
||||
}
|
||||
|
||||
private double evaluatePriorityScore(int idx) {
|
||||
|
@@ -1,7 +0,0 @@
|
||||
package nu.marginalia.index.query.limit;
|
||||
|
||||
public record QueryLimits(int resultsByDomain, int resultsTotal, int timeoutMs, int fetchSize) {
|
||||
public QueryLimits forSingleDomain() {
|
||||
return new QueryLimits(resultsTotal, resultsTotal, timeoutMs, fetchSize);
|
||||
}
|
||||
}
|
@@ -4,10 +4,11 @@ import com.google.inject.Guice;
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.IndexLocations;
|
||||
import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
|
||||
import nu.marginalia.index.construction.DocIdRewriter;
|
||||
import nu.marginalia.index.construction.full.FullIndexConstructor;
|
||||
import nu.marginalia.index.construction.prio.PrioIndexConstructor;
|
||||
@@ -17,7 +18,6 @@ import nu.marginalia.index.forward.construction.ForwardIndexConverter;
|
||||
import nu.marginalia.index.index.StatefulIndex;
|
||||
import nu.marginalia.index.journal.IndexJournal;
|
||||
import nu.marginalia.index.journal.IndexJournalSlopWriter;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.linkdb.docs.DocumentDbReader;
|
||||
@@ -115,9 +115,16 @@ public class IndexQueryServiceIntegrationSmokeTest {
|
||||
|
||||
var rsp = queryService.justQuery(
|
||||
SearchSpecification.builder()
|
||||
.queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
|
||||
.queryLimits(
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(10)
|
||||
.setResultsTotal(10)
|
||||
.setTimeoutMs(Integer.MAX_VALUE)
|
||||
.setFetchSize(4000)
|
||||
.build()
|
||||
)
|
||||
.queryStrategy(QueryStrategy.SENTENCE)
|
||||
.rankingParams(ResultRankingParameters.sensibleDefaults())
|
||||
.rankingParams(PrototypeRankingParameters.sensibleDefaults())
|
||||
.domains(new ArrayList<>())
|
||||
.searchSetIdentifier("NONE")
|
||||
.query(
|
||||
@@ -171,9 +178,16 @@ public class IndexQueryServiceIntegrationSmokeTest {
|
||||
|
||||
var rsp = queryService.justQuery(
|
||||
SearchSpecification.builder()
|
||||
.queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
|
||||
.queryLimits(
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(10)
|
||||
.setResultsTotal(10)
|
||||
.setTimeoutMs(Integer.MAX_VALUE)
|
||||
.setFetchSize(4000)
|
||||
.build()
|
||||
)
|
||||
.queryStrategy(QueryStrategy.SENTENCE)
|
||||
.rankingParams(ResultRankingParameters.sensibleDefaults())
|
||||
.rankingParams(PrototypeRankingParameters.sensibleDefaults())
|
||||
.domains(new ArrayList<>())
|
||||
.searchSetIdentifier("NONE")
|
||||
.query(
|
||||
@@ -225,8 +239,15 @@ public class IndexQueryServiceIntegrationSmokeTest {
|
||||
|
||||
var rsp = queryService.justQuery(
|
||||
SearchSpecification.builder()
|
||||
.queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
|
||||
.rankingParams(ResultRankingParameters.sensibleDefaults())
|
||||
.queryLimits(
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(10)
|
||||
.setResultsTotal(10)
|
||||
.setTimeoutMs(Integer.MAX_VALUE)
|
||||
.setFetchSize(4000)
|
||||
.build()
|
||||
)
|
||||
.rankingParams(PrototypeRankingParameters.sensibleDefaults())
|
||||
.queryStrategy(QueryStrategy.SENTENCE)
|
||||
.domains(List.of(2))
|
||||
.query(
|
||||
@@ -282,11 +303,18 @@ public class IndexQueryServiceIntegrationSmokeTest {
|
||||
|
||||
var rsp = queryService.justQuery(
|
||||
SearchSpecification.builder()
|
||||
.queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
|
||||
.queryLimits(
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(10)
|
||||
.setResultsTotal(10)
|
||||
.setTimeoutMs(Integer.MAX_VALUE)
|
||||
.setFetchSize(4000)
|
||||
.build()
|
||||
)
|
||||
.year(SpecificationLimit.equals(1998))
|
||||
.queryStrategy(QueryStrategy.SENTENCE)
|
||||
.searchSetIdentifier("NONE")
|
||||
.rankingParams(ResultRankingParameters.sensibleDefaults())
|
||||
.rankingParams(PrototypeRankingParameters.sensibleDefaults())
|
||||
.query(
|
||||
SearchQuery.builder()
|
||||
.compiledQuery("4")
|
||||
|
@@ -4,10 +4,11 @@ import com.google.inject.Guice;
|
||||
import com.google.inject.Inject;
|
||||
import it.unimi.dsi.fastutil.ints.IntList;
|
||||
import nu.marginalia.IndexLocations;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
|
||||
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
|
||||
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
|
||||
import nu.marginalia.hash.MurmurHash3_128;
|
||||
import nu.marginalia.index.construction.DocIdRewriter;
|
||||
import nu.marginalia.index.construction.full.FullIndexConstructor;
|
||||
@@ -18,7 +19,6 @@ import nu.marginalia.index.forward.construction.ForwardIndexConverter;
|
||||
import nu.marginalia.index.index.StatefulIndex;
|
||||
import nu.marginalia.index.journal.IndexJournal;
|
||||
import nu.marginalia.index.journal.IndexJournalSlopWriter;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.linkdb.docs.DocumentDbReader;
|
||||
@@ -389,13 +389,20 @@ public class IndexQueryServiceIntegrationTest {
|
||||
SearchSpecification basicQuery(Function<SearchSpecification.SearchSpecificationBuilder, SearchSpecification.SearchSpecificationBuilder> mutator)
|
||||
{
|
||||
var builder = SearchSpecification.builder()
|
||||
.queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
|
||||
.queryLimits(
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(10)
|
||||
.setResultsTotal(10)
|
||||
.setTimeoutMs(Integer.MAX_VALUE)
|
||||
.setFetchSize(4000)
|
||||
.build()
|
||||
)
|
||||
.queryStrategy(QueryStrategy.SENTENCE)
|
||||
.year(SpecificationLimit.none())
|
||||
.quality(SpecificationLimit.none())
|
||||
.size(SpecificationLimit.none())
|
||||
.rank(SpecificationLimit.none())
|
||||
.rankingParams(ResultRankingParameters.sensibleDefaults())
|
||||
.rankingParams(PrototypeRankingParameters.sensibleDefaults())
|
||||
.domains(new ArrayList<>())
|
||||
.searchSetIdentifier("NONE");
|
||||
|
||||
|
@@ -27,7 +27,7 @@ public class SentenceSegmentSplitter {
|
||||
else {
|
||||
// If we flatten unicode, we do this...
|
||||
// FIXME: This can almost definitely be cleaned up and simplified.
|
||||
wordBreakPattern = Pattern.compile("([^/_#@.a-zA-Z'+\\-0-9\\u00C0-\\u00D6\\u00D8-\\u00f6\\u00f8-\\u00ff]+)|[|]|(\\.(\\s+|$))");
|
||||
wordBreakPattern = Pattern.compile("([^/<>$:_#@.a-zA-Z'+\\-0-9\\u00C0-\\u00D6\\u00D8-\\u00f6\\u00f8-\\u00ff]+)|[|]|(\\.(\\s+|$))");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,12 +90,17 @@ public class SentenceSegmentSplitter {
|
||||
for (int i = 0; i < ret.size(); i++) {
|
||||
String part = ret.get(i);
|
||||
|
||||
if (part.startsWith("<") && part.endsWith(">") && part.length() > 2) {
|
||||
ret.set(i, part.substring(1, part.length() - 1));
|
||||
}
|
||||
|
||||
if (part.startsWith("'") && part.length() > 1) {
|
||||
ret.set(i, part.substring(1));
|
||||
}
|
||||
if (part.endsWith("'") && part.length() > 1) {
|
||||
ret.set(i, part.substring(0, part.length()-1));
|
||||
}
|
||||
|
||||
while (part.endsWith(".")) {
|
||||
part = part.substring(0, part.length()-1);
|
||||
ret.set(i, part);
|
||||
|
@@ -28,6 +28,20 @@ class SentenceExtractorTest {
|
||||
System.out.println(dld);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testCplusplus() {
|
||||
var dld = sentenceExtractor.extractSentence("std::vector", EnumSet.noneOf(HtmlTag.class));
|
||||
assertEquals(1, dld.length());
|
||||
assertEquals("std::vector", dld.wordsLowerCase[0]);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPHP() {
|
||||
var dld = sentenceExtractor.extractSentence("$_GET", EnumSet.noneOf(HtmlTag.class));
|
||||
assertEquals(1, dld.length());
|
||||
assertEquals("$_get", dld.wordsLowerCase[0]);
|
||||
}
|
||||
|
||||
@Test
|
||||
void testPolishArtist() {
|
||||
var dld = sentenceExtractor.extractSentence("Uklański", EnumSet.noneOf(HtmlTag.class));
|
||||
|
@@ -152,7 +152,10 @@ public class DocumentPositionMapper {
|
||||
}
|
||||
|
||||
boolean matchesWordPattern(String s) {
|
||||
// this function is an unrolled version of the regexp [\da-zA-Z]{1,15}([.\-_/:+*][\da-zA-Z]{1,10}){0,4}
|
||||
if (s.length() > 48)
|
||||
return false;
|
||||
|
||||
// this function is an unrolled version of the regexp [\da-zA-Z]{1,15}([.\-_/:+*][\da-zA-Z]{1,10}){0,8}
|
||||
|
||||
String wordPartSeparator = ".-_/:+*";
|
||||
|
||||
@@ -169,7 +172,7 @@ public class DocumentPositionMapper {
|
||||
if (i == 0)
|
||||
return false;
|
||||
|
||||
for (int j = 0; j < 5; j++) {
|
||||
for (int j = 0; j < 8; j++) {
|
||||
if (i == s.length()) return true;
|
||||
|
||||
if (wordPartSeparator.indexOf(s.charAt(i)) < 0) {
|
||||
|
@@ -30,9 +30,11 @@ class DocumentPositionMapperTest {
|
||||
Assertions.assertFalse(positionMapper.matchesWordPattern("1234567890abcdef"));
|
||||
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("test-test-test-test-test"));
|
||||
Assertions.assertFalse(positionMapper.matchesWordPattern("test-test-test-test-test-test"));
|
||||
Assertions.assertFalse(positionMapper.matchesWordPattern("test-test-test-test-test-test-test-test-test"));
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("192.168.1.100/24"));
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("std::vector"));
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("std::vector::push_back"));
|
||||
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("c++"));
|
||||
Assertions.assertTrue(positionMapper.matchesWordPattern("m*a*s*h"));
|
||||
Assertions.assertFalse(positionMapper.matchesWordPattern("Stulpnagelstrasse"));
|
||||
|
@@ -0,0 +1,113 @@
|
||||
package nu.marginalia.converting.processor.plugin.specialization;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import nu.marginalia.converting.processor.logic.TitleExtractor;
|
||||
import nu.marginalia.converting.processor.summary.SummaryExtractor;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.util.Strings;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
@Singleton
|
||||
public class CppreferenceSpecialization extends WikiSpecialization {
|
||||
|
||||
@Inject
|
||||
public CppreferenceSpecialization(SummaryExtractor summaryExtractor, TitleExtractor titleExtractor) {
|
||||
super(summaryExtractor, titleExtractor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Document prune(Document original) {
|
||||
var doc = original.clone();
|
||||
|
||||
doc.getElementsByClass("t-nv").remove();
|
||||
doc.getElementsByClass("toc").remove();
|
||||
doc.getElementsByClass("mw-head").remove();
|
||||
doc.getElementsByClass("printfooter").remove();
|
||||
doc.getElementsByClass("cpp-footer-base").remove();
|
||||
|
||||
doc.title(doc.title() + " " + Strings.join(extractExtraTokens(doc.title()), ' '));
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSummary(Document doc, Set<String> importantWords) {
|
||||
|
||||
Element declTable = doc.getElementsByClass("t-dcl-begin").first();
|
||||
if (declTable != null) {
|
||||
var nextPar = declTable.nextElementSibling();
|
||||
if (nextPar != null) {
|
||||
return nextPar.text();
|
||||
}
|
||||
}
|
||||
|
||||
return super.getSummary(doc, importantWords);
|
||||
}
|
||||
|
||||
|
||||
public List<String> extractExtraTokens(String title) {
|
||||
|
||||
if (!title.contains("::")) {
|
||||
return List.of();
|
||||
}
|
||||
if (!title.contains("-")) {
|
||||
return List.of();
|
||||
}
|
||||
|
||||
title = StringUtils.split(title, '-')[0];
|
||||
|
||||
String name = title;
|
||||
for (;;) {
|
||||
int lbidx = name.indexOf('<');
|
||||
int rbidx = name.indexOf('>');
|
||||
|
||||
if (lbidx > 0 && rbidx > lbidx) {
|
||||
String className = name.substring(0, lbidx);
|
||||
String methodName = name.substring(rbidx + 1);
|
||||
name = className + methodName;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
List<String> tokens = new ArrayList<>();
|
||||
|
||||
for (var part : name.split("\\s*,\\s*")) {
|
||||
if (part.endsWith(")") && !part.endsWith("()")) {
|
||||
int parenStart = part.indexOf('(');
|
||||
if (parenStart > 0) { // foo(...) -> foo
|
||||
part = part.substring(0, parenStart);
|
||||
}
|
||||
else if (parenStart == 0) { // (foo) -> foo
|
||||
part = part.substring(1, part.length() - 1);
|
||||
}
|
||||
}
|
||||
|
||||
part = part.trim();
|
||||
if (part.contains("::")) {
|
||||
tokens.add(part);
|
||||
if (part.startsWith("std::")) {
|
||||
tokens.add(part.substring(5));
|
||||
|
||||
int ss = part.indexOf("::", 5);
|
||||
if (ss > 0) {
|
||||
tokens.add(part.substring(0, ss));
|
||||
tokens.add(part.substring(ss+2));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tokens;
|
||||
}
|
||||
|
||||
|
||||
}
|
@@ -24,6 +24,7 @@ public class HtmlProcessorSpecializations {
|
||||
private final WikiSpecialization wikiSpecialization;
|
||||
private final BlogSpecialization blogSpecialization;
|
||||
private final GogStoreSpecialization gogStoreSpecialization;
|
||||
private final CppreferenceSpecialization cppreferenceSpecialization;
|
||||
private final DefaultSpecialization defaultSpecialization;
|
||||
|
||||
@Inject
|
||||
@@ -37,6 +38,7 @@ public class HtmlProcessorSpecializations {
|
||||
WikiSpecialization wikiSpecialization,
|
||||
BlogSpecialization blogSpecialization,
|
||||
GogStoreSpecialization gogStoreSpecialization,
|
||||
CppreferenceSpecialization cppreferenceSpecialization,
|
||||
DefaultSpecialization defaultSpecialization) {
|
||||
this.domainTypes = domainTypes;
|
||||
this.lemmySpecialization = lemmySpecialization;
|
||||
@@ -48,6 +50,7 @@ public class HtmlProcessorSpecializations {
|
||||
this.wikiSpecialization = wikiSpecialization;
|
||||
this.blogSpecialization = blogSpecialization;
|
||||
this.gogStoreSpecialization = gogStoreSpecialization;
|
||||
this.cppreferenceSpecialization = cppreferenceSpecialization;
|
||||
this.defaultSpecialization = defaultSpecialization;
|
||||
}
|
||||
|
||||
@@ -66,6 +69,10 @@ public class HtmlProcessorSpecializations {
|
||||
return mariadbKbSpecialization;
|
||||
}
|
||||
|
||||
if (url.domain.getTopDomain().equals("cppreference.com")) {
|
||||
return cppreferenceSpecialization;
|
||||
}
|
||||
|
||||
if (url.domain.toString().equals("store.steampowered.com")) {
|
||||
return steamStoreSpecialization;
|
||||
}
|
||||
@@ -86,6 +93,9 @@ public class HtmlProcessorSpecializations {
|
||||
if (generator.keywords().contains("javadoc")) {
|
||||
return javadocSpecialization;
|
||||
}
|
||||
|
||||
// Must be toward the end, as some specializations are for
|
||||
// wiki-generator content
|
||||
if (generator.type() == GeneratorType.WIKI) {
|
||||
return wikiSpecialization;
|
||||
}
|
||||
@@ -105,7 +115,7 @@ public class HtmlProcessorSpecializations {
|
||||
|
||||
boolean shouldIndex(EdgeUrl url);
|
||||
double lengthModifier();
|
||||
void amendWords(Document doc, DocumentKeywordsBuilder words);
|
||||
|
||||
default void amendWords(Document doc, DocumentKeywordsBuilder words) {}
|
||||
}
|
||||
}
|
||||
|
@@ -4,7 +4,6 @@ import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import nu.marginalia.converting.processor.logic.TitleExtractor;
|
||||
import nu.marginalia.converting.processor.summary.SummaryExtractor;
|
||||
import nu.marginalia.keyword.model.DocumentKeywordsBuilder;
|
||||
import nu.marginalia.model.EdgeUrl;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
@@ -93,6 +92,8 @@ public class WikiSpecialization extends DefaultSpecialization {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void amendWords(Document doc, DocumentKeywordsBuilder words) {
|
||||
@Override
|
||||
public double lengthModifier() {
|
||||
return 2.5;
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,27 @@
|
||||
package nu.marginalia.converting.processor.plugin.specialization;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
class CppreferenceSpecializationTest {
|
||||
CppreferenceSpecialization specialization = new CppreferenceSpecialization(null, null);
|
||||
|
||||
@Test
|
||||
public void testTitleMagic() {
|
||||
|
||||
List<String> ret;
|
||||
|
||||
ret = specialization.extractExtraTokens("std::multimap<Key, T, Compare, Allocator>::crend - cppreference.com");
|
||||
Assertions.assertTrue(ret.contains("std::multimap::crend"));
|
||||
Assertions.assertTrue(ret.contains("multimap::crend"));
|
||||
Assertions.assertTrue(ret.contains("std::multimap"));
|
||||
Assertions.assertTrue(ret.contains("crend"));
|
||||
|
||||
ret = specialization.extractExtraTokens("std::coroutine_handle<Promise>::operator(), std::coroutine_handle<Promise>::resume - cppreference.com");
|
||||
Assertions.assertTrue(ret.contains("std::coroutine_handle::operator()"));
|
||||
Assertions.assertTrue(ret.contains("std::coroutine_handle::resume"));
|
||||
}
|
||||
|
||||
}
|
@@ -44,6 +44,7 @@ dependencies {
|
||||
implementation libs.bundles.jetty
|
||||
implementation libs.opencsv
|
||||
implementation libs.trove
|
||||
implementation libs.protobuf
|
||||
implementation libs.fastutil
|
||||
implementation libs.bundles.gson
|
||||
implementation libs.bundles.mariadb
|
||||
|
@@ -6,10 +6,10 @@ import nu.marginalia.api.model.ApiSearchResult;
|
||||
import nu.marginalia.api.model.ApiSearchResultQueryDetails;
|
||||
import nu.marginalia.api.model.ApiSearchResults;
|
||||
import nu.marginalia.api.searchquery.QueryClient;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryParams;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSetIdentifier;
|
||||
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
|
||||
import nu.marginalia.index.query.limit.QueryLimits;
|
||||
import nu.marginalia.model.idx.WordFlags;
|
||||
|
||||
import java.util.ArrayList;
|
||||
@@ -47,11 +47,12 @@ public class ApiSearchOperator {
|
||||
|
||||
return new QueryParams(
|
||||
query,
|
||||
new QueryLimits(
|
||||
2,
|
||||
Math.min(100, count),
|
||||
150,
|
||||
8192),
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsByDomain(2)
|
||||
.setResultsTotal(Math.min(100, count))
|
||||
.setTimeoutMs(150)
|
||||
.setFetchSize(8192)
|
||||
.build(),
|
||||
searchSet.name());
|
||||
}
|
||||
|
||||
|
@@ -11,7 +11,7 @@ import nu.marginalia.api.svc.RateLimiterService;
|
||||
import nu.marginalia.api.svc.ResponseCache;
|
||||
import nu.marginalia.model.gson.GsonFactory;
|
||||
import nu.marginalia.service.server.BaseServiceParams;
|
||||
import nu.marginalia.service.server.Service;
|
||||
import nu.marginalia.service.server.SparkService;
|
||||
import nu.marginalia.service.server.mq.MqRequest;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -21,7 +21,7 @@ import spark.Request;
|
||||
import spark.Response;
|
||||
import spark.Spark;
|
||||
|
||||
public class ApiService extends Service {
|
||||
public class ApiService extends SparkService {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
private final Gson gson = GsonFactory.get();
|
||||
@@ -69,7 +69,7 @@ public class ApiService extends Service {
|
||||
this.searchOperator = searchOperator;
|
||||
|
||||
Spark.get("/api/", (rq, rsp) -> {
|
||||
rsp.redirect("https://memex.marginalia.nu/projects/edge/api.gmi");
|
||||
rsp.redirect("https://about.marginalia-search.com/article/api/");
|
||||
return "";
|
||||
});
|
||||
|
||||
|
@@ -9,7 +9,7 @@ import nu.marginalia.renderer.MustacheRenderer;
|
||||
import nu.marginalia.renderer.RendererFactory;
|
||||
import nu.marginalia.screenshot.ScreenshotService;
|
||||
import nu.marginalia.service.server.BaseServiceParams;
|
||||
import nu.marginalia.service.server.Service;
|
||||
import nu.marginalia.service.server.SparkService;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import spark.Request;
|
||||
import spark.Response;
|
||||
@@ -18,7 +18,7 @@ import spark.Spark;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
public class DatingService extends Service {
|
||||
public class DatingService extends SparkService {
|
||||
private final DomainBlacklist blacklist;
|
||||
private final DbBrowseDomainsSimilarCosine browseSimilarCosine;
|
||||
private final DbBrowseDomainsRandom browseRandom;
|
||||
|
@@ -5,7 +5,7 @@ import com.zaxxer.hikari.HikariDataSource;
|
||||
import nu.marginalia.renderer.MustacheRenderer;
|
||||
import nu.marginalia.renderer.RendererFactory;
|
||||
import nu.marginalia.service.server.BaseServiceParams;
|
||||
import nu.marginalia.service.server.Service;
|
||||
import nu.marginalia.service.server.SparkService;
|
||||
import nu.marginalia.service.server.StaticResources;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
import spark.Request;
|
||||
@@ -15,7 +15,7 @@ import spark.Spark;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
|
||||
public class ExplorerService extends Service {
|
||||
public class ExplorerService extends SparkService {
|
||||
|
||||
private final MustacheRenderer<Object> renderer;
|
||||
private final HikariDataSource dataSource;
|
||||
|
94
code/services-application/search-service-legacy/build.gradle
Normal file
94
code/services-application/search-service-legacy/build.gradle
Normal file
@@ -0,0 +1,94 @@
|
||||
plugins {
|
||||
id 'java'
|
||||
id 'io.freefair.sass-base' version '8.4'
|
||||
id 'io.freefair.sass-java' version '8.4'
|
||||
id 'application'
|
||||
id 'jvm-test-suite'
|
||||
|
||||
id 'com.google.cloud.tools.jib' version '3.4.3'
|
||||
}
|
||||
|
||||
application {
|
||||
mainClass = 'nu.marginalia.search.SearchMain'
|
||||
applicationName = 'search-service-legacy'
|
||||
}
|
||||
|
||||
tasks.distZip.enabled = false
|
||||
|
||||
|
||||
java {
|
||||
toolchain {
|
||||
languageVersion.set(JavaLanguageVersion.of(rootProject.ext.jvmVersion))
|
||||
}
|
||||
}
|
||||
sass {
|
||||
sourceMapEnabled = true
|
||||
sourceMapEmbed = true
|
||||
outputStyle = EXPANDED
|
||||
}
|
||||
|
||||
apply from: "$rootProject.projectDir/srcsets.gradle"
|
||||
apply from: "$rootProject.projectDir/docker.gradle"
|
||||
|
||||
dependencies {
|
||||
implementation project(':code:common:db')
|
||||
implementation project(':code:common:model')
|
||||
implementation project(':code:common:service')
|
||||
implementation project(':code:common:config')
|
||||
implementation project(':code:index:query')
|
||||
|
||||
implementation project(':code:libraries:easy-lsh')
|
||||
implementation project(':code:libraries:language-processing')
|
||||
implementation project(':code:libraries:braille-block-punch-cards')
|
||||
implementation project(':code:libraries:term-frequency-dict')
|
||||
|
||||
implementation project(':code:functions:live-capture:api')
|
||||
implementation project(':code:functions:math:api')
|
||||
implementation project(':code:functions:domain-info:api')
|
||||
implementation project(':code:functions:search-query:api')
|
||||
|
||||
|
||||
implementation project(':code:index:api')
|
||||
implementation project(':code:common:renderer')
|
||||
|
||||
implementation project(':code:features-search:screenshots')
|
||||
implementation project(':code:features-search:random-websites')
|
||||
|
||||
implementation libs.bundles.slf4j
|
||||
|
||||
implementation libs.roaringbitmap
|
||||
implementation libs.prometheus
|
||||
implementation libs.notnull
|
||||
implementation libs.guava
|
||||
implementation dependencies.create(libs.guice.get()) {
|
||||
exclude group: 'com.google.guava'
|
||||
}
|
||||
implementation libs.handlebars
|
||||
implementation dependencies.create(libs.spark.get()) {
|
||||
exclude group: 'org.eclipse.jetty'
|
||||
}
|
||||
implementation libs.bundles.jetty
|
||||
implementation libs.opencsv
|
||||
implementation libs.trove
|
||||
implementation libs.fastutil
|
||||
implementation libs.bundles.gson
|
||||
implementation libs.bundles.mariadb
|
||||
implementation libs.bundles.nlp
|
||||
|
||||
testImplementation libs.bundles.slf4j.test
|
||||
testImplementation libs.bundles.junit
|
||||
testImplementation libs.mockito
|
||||
|
||||
testImplementation platform('org.testcontainers:testcontainers-bom:1.17.4')
|
||||
testImplementation libs.commons.codec
|
||||
testImplementation 'org.testcontainers:mariadb:1.17.4'
|
||||
testImplementation 'org.testcontainers:junit-jupiter:1.17.4'
|
||||
testImplementation project(':code:libraries:test-helpers')
|
||||
}
|
||||
|
||||
tasks.register('paperDoll', Test) {
|
||||
useJUnitPlatform {
|
||||
includeTags "paperdoll"
|
||||
}
|
||||
jvmArgs = [ '-DrunPaperDoll=true', '--enable-preview' ]
|
||||
}
|
@@ -0,0 +1,47 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import com.google.inject.Guice;
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Injector;
|
||||
import nu.marginalia.service.MainClass;
|
||||
import nu.marginalia.service.discovery.ServiceRegistryIf;
|
||||
import nu.marginalia.service.module.ServiceConfiguration;
|
||||
import nu.marginalia.service.module.ServiceDiscoveryModule;
|
||||
import nu.marginalia.service.ServiceId;
|
||||
import nu.marginalia.service.module.ServiceConfigurationModule;
|
||||
import nu.marginalia.service.module.DatabaseModule;
|
||||
import nu.marginalia.service.server.Initialization;
|
||||
import spark.Spark;
|
||||
|
||||
/** Entry point for the search service.
 *  Sets up dependency injection and orchestrates the service boot sequence.
 *  NOTE: the statement order in main() is significant — service registration
 *  must happen before the service is marked ready.
 */
public class SearchMain extends MainClass {
    private final SearchService service;

    @Inject
    public SearchMain(SearchService service) {
        this.service = service;
    }

    public static void main(String... args) {

        // Common service bootstrapping (logging, properties, etc.)
        init(ServiceId.Search, args);

        // Must be configured before any Spark routes are registered
        Spark.staticFileLocation("/static/search/");

        Injector injector = Guice.createInjector(
                new SearchModule(),
                new ServiceConfigurationModule(ServiceId.Search),
                new ServiceDiscoveryModule(),
                new DatabaseModule(false)
        );

        // Orchestrate the boot order for the services
        var registry = injector.getInstance(ServiceRegistryIf.class);
        var configuration = injector.getInstance(ServiceConfiguration.class);
        orchestrateBoot(registry, configuration);

        // Instantiating SearchMain pulls in SearchService, which registers all routes
        injector.getInstance(SearchMain.class);
        injector.getInstance(Initialization.class).setReady();
    }
}
|
@@ -0,0 +1,20 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import com.google.inject.AbstractModule;
|
||||
import nu.marginalia.LanguageModels;
|
||||
import nu.marginalia.WebsiteUrl;
|
||||
import nu.marginalia.WmsaHome;
|
||||
import nu.marginalia.renderer.config.HandlebarsConfigurator;
|
||||
|
||||
public class SearchModule extends AbstractModule {
|
||||
|
||||
public void configure() {
|
||||
bind(HandlebarsConfigurator.class).to(SearchHandlebarsConfigurator.class);
|
||||
|
||||
bind(LanguageModels.class).toInstance(WmsaHome.getLanguageModels());
|
||||
|
||||
bind(WebsiteUrl.class).toInstance(new WebsiteUrl(
|
||||
System.getProperty("search.legacyWebsiteUrl", "https://old-search.marginalia.nu/")));
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,266 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import nu.marginalia.WebsiteUrl;
|
||||
import nu.marginalia.api.math.MathClient;
|
||||
import nu.marginalia.api.searchquery.QueryClient;
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryResponse;
|
||||
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
|
||||
import nu.marginalia.bbpc.BrailleBlockPunchCards;
|
||||
import nu.marginalia.db.DbDomainQueries;
|
||||
import nu.marginalia.model.EdgeDomain;
|
||||
import nu.marginalia.model.EdgeUrl;
|
||||
import nu.marginalia.model.crawl.DomainIndexingState;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import nu.marginalia.search.model.ClusteredUrlDetails;
|
||||
import nu.marginalia.search.model.DecoratedSearchResults;
|
||||
import nu.marginalia.search.model.SearchFilters;
|
||||
import nu.marginalia.search.model.UrlDetails;
|
||||
import nu.marginalia.search.results.UrlDeduplicator;
|
||||
import nu.marginalia.search.svc.SearchQueryCountService;
|
||||
import nu.marginalia.search.svc.SearchUnitConversionService;
|
||||
import org.apache.logging.log4j.util.Strings;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.slf4j.Marker;
|
||||
import org.slf4j.MarkerFactory;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
@Singleton
|
||||
public class SearchOperator {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SearchOperator.class);
|
||||
|
||||
// Marker for filtering out sensitive content from the persistent logs
|
||||
private final Marker queryMarker = MarkerFactory.getMarker("QUERY");
|
||||
|
||||
private final MathClient mathClient;
|
||||
private final DbDomainQueries domainQueries;
|
||||
private final QueryClient queryClient;
|
||||
private final SearchQueryParamFactory paramFactory;
|
||||
private final WebsiteUrl websiteUrl;
|
||||
private final SearchUnitConversionService searchUnitConversionService;
|
||||
private final SearchQueryCountService searchVisitorCount;
|
||||
|
||||
|
||||
@Inject
|
||||
public SearchOperator(MathClient mathClient,
|
||||
DbDomainQueries domainQueries,
|
||||
QueryClient queryClient,
|
||||
SearchQueryParamFactory paramFactory,
|
||||
WebsiteUrl websiteUrl,
|
||||
SearchUnitConversionService searchUnitConversionService,
|
||||
SearchQueryCountService searchVisitorCount
|
||||
)
|
||||
{
|
||||
|
||||
this.mathClient = mathClient;
|
||||
this.domainQueries = domainQueries;
|
||||
this.queryClient = queryClient;
|
||||
this.paramFactory = paramFactory;
|
||||
this.websiteUrl = websiteUrl;
|
||||
this.searchUnitConversionService = searchUnitConversionService;
|
||||
this.searchVisitorCount = searchVisitorCount;
|
||||
}
|
||||
|
||||
public List<UrlDetails> doSiteSearch(String domain,
|
||||
int domainId,
|
||||
int count) {
|
||||
|
||||
var queryParams = paramFactory.forSiteSearch(domain, domainId, count);
|
||||
var queryResponse = queryClient.search(queryParams);
|
||||
|
||||
return getResultsFromQuery(queryResponse);
|
||||
}
|
||||
|
||||
public List<UrlDetails> doBacklinkSearch(String domain) {
|
||||
|
||||
var queryParams = paramFactory.forBacklinkSearch(domain);
|
||||
var queryResponse = queryClient.search(queryParams);
|
||||
|
||||
return getResultsFromQuery(queryResponse);
|
||||
}
|
||||
|
||||
public List<UrlDetails> doLinkSearch(String source, String dest) {
|
||||
var queryParams = paramFactory.forLinkSearch(source, dest);
|
||||
var queryResponse = queryClient.search(queryParams);
|
||||
|
||||
return getResultsFromQuery(queryResponse);
|
||||
}
|
||||
|
||||
public DecoratedSearchResults doSearch(SearchParameters userParams) throws InterruptedException {
|
||||
// The full user-facing search query does additional work to try to evaluate the query
|
||||
// e.g. as a unit conversion query. This is done in parallel with the regular search.
|
||||
|
||||
Future<String> eval = searchUnitConversionService.tryEval(userParams.query());
|
||||
|
||||
// Perform the regular search
|
||||
|
||||
var queryParams = paramFactory.forRegularSearch(userParams);
|
||||
QueryResponse queryResponse = queryClient.search(queryParams);
|
||||
var queryResults = getResultsFromQuery(queryResponse);
|
||||
|
||||
// Cluster the results based on the query response
|
||||
List<ClusteredUrlDetails> clusteredResults = SearchResultClusterer
|
||||
.selectStrategy(queryResponse)
|
||||
.clusterResults(queryResults, 25);
|
||||
|
||||
// Log the query and results
|
||||
|
||||
logger.info(queryMarker, "Human terms: {}", Strings.join(queryResponse.searchTermsHuman(), ','));
|
||||
logger.info(queryMarker, "Search Result Count: {}", queryResults.size());
|
||||
|
||||
// Get the evaluation result and other data to return to the user
|
||||
String evalResult = getFutureOrDefault(eval, "");
|
||||
|
||||
String focusDomain = queryResponse.domain();
|
||||
int focusDomainId = focusDomain == null
|
||||
? -1
|
||||
: domainQueries.tryGetDomainId(new EdgeDomain(focusDomain)).orElse(-1);
|
||||
|
||||
List<String> problems = getProblems(evalResult, queryResults, queryResponse);
|
||||
|
||||
List<DecoratedSearchResults.Page> resultPages = IntStream.rangeClosed(1, queryResponse.totalPages())
|
||||
.mapToObj(number -> new DecoratedSearchResults.Page(
|
||||
number,
|
||||
number == userParams.page(),
|
||||
userParams.withPage(number).renderUrl(websiteUrl)
|
||||
))
|
||||
.toList();
|
||||
|
||||
// Return the results to the user
|
||||
return DecoratedSearchResults.builder()
|
||||
.params(userParams)
|
||||
.problems(problems)
|
||||
.evalResult(evalResult)
|
||||
.results(clusteredResults)
|
||||
.filters(new SearchFilters(websiteUrl, userParams))
|
||||
.focusDomain(focusDomain)
|
||||
.focusDomainId(focusDomainId)
|
||||
.resultPages(resultPages)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
public List<UrlDetails> getResultsFromQuery(QueryResponse queryResponse) {
|
||||
final RpcQueryLimits limits = queryResponse.specs().queryLimits;
|
||||
final UrlDeduplicator deduplicator = new UrlDeduplicator(limits.getResultsByDomain());
|
||||
|
||||
// Update the query count (this is what you see on the front page)
|
||||
searchVisitorCount.registerQuery();
|
||||
|
||||
return queryResponse.results().stream()
|
||||
.filter(deduplicator::shouldRetain)
|
||||
.limit(limits.getResultsTotal())
|
||||
.map(SearchOperator::createDetails)
|
||||
.toList();
|
||||
}
|
||||
|
||||
private static UrlDetails createDetails(DecoratedSearchResultItem item) {
|
||||
return new UrlDetails(
|
||||
item.documentId(),
|
||||
item.domainId(),
|
||||
cleanUrl(item.url),
|
||||
item.title,
|
||||
item.description,
|
||||
item.format,
|
||||
item.features,
|
||||
DomainIndexingState.ACTIVE,
|
||||
item.rankingScore, // termScore
|
||||
item.resultsFromDomain,
|
||||
BrailleBlockPunchCards.printBits(item.bestPositions, 64),
|
||||
Long.bitCount(item.bestPositions),
|
||||
item.rawIndexResult,
|
||||
item.rawIndexResult.keywordScores
|
||||
);
|
||||
}
|
||||
|
||||
/** Replace nuisance domains with replacements where available */
|
||||
private static EdgeUrl cleanUrl(EdgeUrl url) {
|
||||
String topdomain = url.domain.topDomain;
|
||||
String subdomain = url.domain.subDomain;
|
||||
String path = url.path;
|
||||
|
||||
if (topdomain.equals("fandom.com")) {
|
||||
int wikiIndex = path.indexOf("/wiki/");
|
||||
if (wikiIndex >= 0) {
|
||||
return new EdgeUrl("https", new EdgeDomain("breezewiki.com"), null, "/" + subdomain + path.substring(wikiIndex), null);
|
||||
}
|
||||
}
|
||||
else if (topdomain.equals("medium.com")) {
|
||||
if (!subdomain.isBlank()) {
|
||||
return new EdgeUrl("https", new EdgeDomain("scribe.rip"), null, path, null);
|
||||
}
|
||||
else {
|
||||
String article = path.substring(path.indexOf("/", 1));
|
||||
return new EdgeUrl("https", new EdgeDomain("scribe.rip"), null, article, null);
|
||||
}
|
||||
|
||||
}
|
||||
return url;
|
||||
}
|
||||
|
||||
private List<String> getProblems(String evalResult, List<UrlDetails> queryResults, QueryResponse response) throws InterruptedException {
|
||||
|
||||
// We don't debug the query if it's a site search
|
||||
if (response.domain() == null)
|
||||
return List.of();
|
||||
|
||||
final List<String> problems = new ArrayList<>(response.problems());
|
||||
|
||||
if (queryResults.size() <= 5 && null == evalResult) {
|
||||
problems.add("Try rephrasing the query, changing the word order or using synonyms to get different results.");
|
||||
|
||||
// Try to spell check the search terms
|
||||
var suggestions = getFutureOrDefault(
|
||||
mathClient.spellCheck(response.searchTermsHuman()),
|
||||
Map.of()
|
||||
);
|
||||
|
||||
suggestions.forEach((term, suggestion) -> {
|
||||
if (suggestion.size() > 1) {
|
||||
String suggestionsStr = "\"%s\" could be spelled %s".formatted(term, suggestion.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(", ")));
|
||||
problems.add(suggestionsStr);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Set<String> representativeKeywords = response.getAllKeywords();
|
||||
if (representativeKeywords.size() > 1 && (representativeKeywords.contains("definition") || representativeKeywords.contains("define") || representativeKeywords.contains("meaning")))
|
||||
{
|
||||
problems.add("Tip: Try using a query that looks like <tt>define:word</tt> if you want a dictionary definition");
|
||||
}
|
||||
|
||||
return problems;
|
||||
}
|
||||
|
||||
private <T> T getFutureOrDefault(@Nullable Future<T> fut, T defaultValue) {
|
||||
return getFutureOrDefault(fut, Duration.ofMillis(50), defaultValue);
|
||||
}
|
||||
|
||||
private <T> T getFutureOrDefault(@Nullable Future<T> fut, Duration timeout, T defaultValue) {
|
||||
if (fut == null || fut.isCancelled()) {
|
||||
return defaultValue;
|
||||
}
|
||||
try {
|
||||
return fut.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
|
||||
}
|
||||
catch (Exception ex) {
|
||||
logger.warn("Error fetching eval result", ex);
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,124 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import nu.marginalia.api.searchquery.RpcQueryLimits;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
import nu.marginalia.api.searchquery.model.query.QueryParams;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSetIdentifier;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class SearchQueryParamFactory {
|
||||
static final RpcQueryLimits defaultLimits = RpcQueryLimits.newBuilder()
|
||||
.setResultsTotal(100)
|
||||
.setResultsByDomain(5)
|
||||
.setTimeoutMs(200)
|
||||
.setFetchSize(8192)
|
||||
.build();
|
||||
|
||||
|
||||
static final RpcQueryLimits shallowLimit = RpcQueryLimits.newBuilder()
|
||||
.setResultsTotal(100)
|
||||
.setResultsByDomain(100)
|
||||
.setTimeoutMs(100)
|
||||
.setFetchSize(512)
|
||||
.build();
|
||||
|
||||
|
||||
public QueryParams forRegularSearch(SearchParameters userParams) {
|
||||
SearchQuery prototype = new SearchQuery();
|
||||
var profile = userParams.profile();
|
||||
|
||||
profile.addTacitTerms(prototype);
|
||||
userParams.js().addTacitTerms(prototype);
|
||||
userParams.adtech().addTacitTerms(prototype);
|
||||
|
||||
return new QueryParams(
|
||||
userParams.query(),
|
||||
null,
|
||||
prototype.searchTermsInclude,
|
||||
prototype.searchTermsExclude,
|
||||
prototype.searchTermsPriority,
|
||||
prototype.searchTermsAdvice,
|
||||
profile.getQualityLimit(),
|
||||
profile.getYearLimit(),
|
||||
profile.getSizeLimit(),
|
||||
SpecificationLimit.none(),
|
||||
List.of(),
|
||||
defaultLimits,
|
||||
profile.searchSetIdentifier.name(),
|
||||
userParams.strategy(),
|
||||
userParams.temporalBias(),
|
||||
userParams.page()
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
public QueryParams forSiteSearch(String domain, int domainId, int count) {
|
||||
return new QueryParams("site:"+domain,
|
||||
null,
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
List.of(domainId),
|
||||
RpcQueryLimits.newBuilder()
|
||||
.setResultsTotal(count)
|
||||
.setResultsByDomain(count)
|
||||
.setTimeoutMs(100)
|
||||
.setFetchSize(512)
|
||||
.build(),
|
||||
SearchSetIdentifier.NONE.name(),
|
||||
QueryStrategy.AUTO,
|
||||
RpcTemporalBias.Bias.NONE,
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
public QueryParams forBacklinkSearch(String domain) {
|
||||
return new QueryParams("links:"+domain,
|
||||
null,
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
List.of(),
|
||||
shallowLimit,
|
||||
SearchSetIdentifier.NONE.name(),
|
||||
QueryStrategy.AUTO,
|
||||
RpcTemporalBias.Bias.NONE,
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
public QueryParams forLinkSearch(String sourceDomain, String destDomain) {
|
||||
return new QueryParams("site:" + sourceDomain + " links:" + destDomain,
|
||||
null,
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
List.of(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
SpecificationLimit.none(),
|
||||
List.of(),
|
||||
shallowLimit,
|
||||
SearchSetIdentifier.NONE.name(),
|
||||
QueryStrategy.AUTO,
|
||||
RpcTemporalBias.Bias.NONE,
|
||||
1
|
||||
);
|
||||
}
|
||||
}
|
@@ -0,0 +1,53 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.query.QueryResponse;
|
||||
import nu.marginalia.search.model.ClusteredUrlDetails;
|
||||
import nu.marginalia.search.model.UrlDetails;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/** Functions for clustering search results */
|
||||
public class SearchResultClusterer {
|
||||
private SearchResultClusterer() {}
|
||||
|
||||
public interface SearchResultClusterStrategy {
|
||||
List<ClusteredUrlDetails> clusterResults(List<UrlDetails> results, int total);
|
||||
}
|
||||
|
||||
public static SearchResultClusterStrategy selectStrategy(QueryResponse response) {
|
||||
if (response.domain() != null && !response.domain().isBlank())
|
||||
return SearchResultClusterer::noOp;
|
||||
|
||||
return SearchResultClusterer::byDomain;
|
||||
}
|
||||
|
||||
/** No clustering, just return the results as is */
|
||||
private static List<ClusteredUrlDetails> noOp(List<UrlDetails> results, int total) {
|
||||
if (results.isEmpty())
|
||||
return List.of();
|
||||
|
||||
return results.stream()
|
||||
.map(ClusteredUrlDetails::new)
|
||||
.toList();
|
||||
}
|
||||
|
||||
/** Cluster the results by domain, and return the top "total" clusters
|
||||
* sorted by the relevance of the best result
|
||||
*/
|
||||
private static List<ClusteredUrlDetails> byDomain(List<UrlDetails> results, int total) {
|
||||
if (results.isEmpty())
|
||||
return List.of();
|
||||
|
||||
return results.stream()
|
||||
.collect(
|
||||
Collectors.groupingBy(details -> details.domainId)
|
||||
)
|
||||
.values().stream()
|
||||
.map(ClusteredUrlDetails::new)
|
||||
.sorted()
|
||||
.limit(total)
|
||||
.toList();
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,128 @@
|
||||
package nu.marginalia.search;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import io.prometheus.client.Counter;
|
||||
import io.prometheus.client.Histogram;
|
||||
import nu.marginalia.WebsiteUrl;
|
||||
import nu.marginalia.search.svc.*;
|
||||
import nu.marginalia.service.server.BaseServiceParams;
|
||||
import nu.marginalia.service.server.SparkService;
|
||||
import nu.marginalia.service.server.StaticResources;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import spark.Request;
|
||||
import spark.Response;
|
||||
import spark.Route;
|
||||
import spark.Spark;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/** HTTP front end of the search service: registers all Spark routes and wraps
 *  them in Prometheus request-time/error metrics.
 */
public class SearchService extends SparkService {

    private final WebsiteUrl websiteUrl;
    private final StaticResources staticResources;

    private static final Logger logger = LoggerFactory.getLogger(SearchService.class);

    // Request latency histogram, labeled by matched route and HTTP method
    private static final Histogram wmsa_search_service_request_time = Histogram.build()
            .name("wmsa_search_service_request_time")
            .linearBuckets(0.05, 0.05, 15)
            .labelNames("matchedPath", "method")
            .help("Search service request time (seconds)")
            .register();
    // Error counter, incremented by the global Spark exception handler below
    private static final Counter wmsa_search_service_error_count = Counter.build()
            .name("wmsa_search_service_error_count")
            .labelNames("matchedPath", "method")
            .help("Search service error count")
            .register();

    @Inject
    public SearchService(BaseServiceParams params,
                         WebsiteUrl websiteUrl,
                         StaticResources staticResources,
                         SearchFrontPageService frontPageService,
                         SearchErrorPageService errorPageService,
                         SearchAddToCrawlQueueService addToCrawlQueueService,
                         SearchSiteInfoService siteInfoService,
                         SearchCrosstalkService crosstalkService,
                         SearchQueryService searchQueryService)
            throws Exception
    {
        super(params);

        this.websiteUrl = websiteUrl;
        this.staticResources = staticResources;

        // Cache static files client-side for 10 minutes
        Spark.staticFiles.expireTime(600);

        // NOTE: route registration order matters to Spark's matching;
        // the wildcard /:resource route must come after the fixed paths
        SearchServiceMetrics.get("/search", searchQueryService::pathSearch);

        SearchServiceMetrics.get("/", frontPageService::render);
        SearchServiceMetrics.get("/news.xml", frontPageService::renderNewsFeed);
        SearchServiceMetrics.get("/:resource", this::serveStatic);

        SearchServiceMetrics.post("/site/suggest/", addToCrawlQueueService::suggestCrawling);

        SearchServiceMetrics.get("/site-search/:site/*", this::siteSearchRedir);

        SearchServiceMetrics.get("/site/:site", siteInfoService::handle);
        SearchServiceMetrics.post("/site/:site", siteInfoService::handlePost);

        SearchServiceMetrics.get("/crosstalk/", crosstalkService::handle);

        // Global error handler: log, count, and render the error page
        Spark.exception(Exception.class, (e,p,q) -> {
            logger.error("Error during processing", e);
            wmsa_search_service_error_count.labels(p.pathInfo(), p.requestMethod()).inc();
            errorPageService.serveError(p, q);
        });

        Spark.awaitInitialization();
    }

    /** Wraps a route with a timer and a counter */
    private static class SearchServiceMetrics implements Route {
        private final Route delegatedRoute;

        // Convenience registrars that wrap the route before handing it to Spark
        static void get(String path, Route route) {
            Spark.get(path, new SearchServiceMetrics(route));
        }
        static void post(String path, Route route) {
            Spark.post(path, new SearchServiceMetrics(route));
        }

        private SearchServiceMetrics(Route delegatedRoute) {
            this.delegatedRoute = delegatedRoute;
        }

        @Override
        public Object handle(Request request, Response response) throws Exception {
            // Times the delegate's execution and records it in the histogram
            return wmsa_search_service_request_time
                    .labels(request.matchedPath(), request.requestMethod())
                    .time(() -> delegatedRoute.handle(request, response));
        }
    }

    /** Serves a static resource from the search bundle, named by the :resource path param. */
    private Object serveStatic(Request request, Response response) {
        String resource = request.params("resource");
        staticResources.serveStatic("search", resource, request, response);
        return "";
    }

    /** Redirects /site-search/:site/terms to the regular /search endpoint
     *  with an equivalent site:-restricted query. */
    private Object siteSearchRedir(Request request, Response response) {
        final String site = request.params("site");
        final String searchTerms;

        // The splat (wildcard) part of the path holds the search terms, if any
        if (request.splat().length == 0) searchTerms = "";
        else searchTerms = request.splat()[0];

        final String query = URLEncoder.encode(String.format("%s site:%s", searchTerms, site), StandardCharsets.UTF_8).trim();
        final String profile = request.queryParamOrDefault("profile", "yolo");

        response.redirect(websiteUrl.withPath("search?query="+query+"&profile="+profile));

        return "";
    }

}
|
@@ -0,0 +1,43 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.search.command.commands.*;
|
||||
import spark.Response;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class CommandEvaluator {
|
||||
|
||||
private final List<SearchCommandInterface> specialCommands = new ArrayList<>();
|
||||
private final SearchCommand defaultCommand;
|
||||
|
||||
@Inject
|
||||
public CommandEvaluator(
|
||||
BrowseCommand browse,
|
||||
ConvertCommand convert,
|
||||
DefinitionCommand define,
|
||||
BangCommand bang,
|
||||
SiteRedirectCommand siteRedirect,
|
||||
SearchCommand search
|
||||
) {
|
||||
specialCommands.add(browse);
|
||||
specialCommands.add(convert);
|
||||
specialCommands.add(define);
|
||||
specialCommands.add(bang);
|
||||
specialCommands.add(siteRedirect);
|
||||
|
||||
defaultCommand = search;
|
||||
}
|
||||
|
||||
public Object eval(Response response, SearchParameters parameters) {
|
||||
for (var cmd : specialCommands) {
|
||||
var maybe = cmd.process(response, parameters);
|
||||
if (maybe.isPresent())
|
||||
return maybe.get();
|
||||
}
|
||||
|
||||
return defaultCommand.process(response, parameters).orElse("");
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,29 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Arrays;
|
||||
|
||||
public enum SearchAdtechParameter {
|
||||
DEFAULT("default"),
|
||||
REDUCE("reduce", "special:ads", "special:affiliate");
|
||||
|
||||
public final String value;
|
||||
public final String[] implictExcludeSearchTerms;
|
||||
|
||||
SearchAdtechParameter(String value, String... implictExcludeSearchTerms) {
|
||||
this.value = value;
|
||||
this.implictExcludeSearchTerms = implictExcludeSearchTerms;
|
||||
}
|
||||
|
||||
public static SearchAdtechParameter parse(@Nullable String value) {
|
||||
if (REDUCE.value.equals(value)) return REDUCE;
|
||||
|
||||
return DEFAULT;
|
||||
}
|
||||
|
||||
public void addTacitTerms(SearchQuery subquery) {
|
||||
subquery.searchTermsExclude.addAll(Arrays.asList(implictExcludeSearchTerms));
|
||||
}
|
||||
}
|
@@ -0,0 +1,10 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
|
||||
import spark.Response;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
/** A command that may intercept and handle a search request.
 *  Implementations return a rendered result when they accept the request,
 *  or an empty Optional to pass it on to the next command.
 */
public interface SearchCommandInterface {
    Optional<Object> process(Response response, SearchParameters parameters);
}
|
@@ -0,0 +1,31 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Arrays;
|
||||
|
||||
public enum SearchJsParameter {
|
||||
DEFAULT("default"),
|
||||
DENY_JS("no-js", "js:true"),
|
||||
REQUIRE_JS("yes-js", "js:false");
|
||||
|
||||
public final String value;
|
||||
public final String[] implictExcludeSearchTerms;
|
||||
|
||||
SearchJsParameter(String value, String... implictExcludeSearchTerms) {
|
||||
this.value = value;
|
||||
this.implictExcludeSearchTerms = implictExcludeSearchTerms;
|
||||
}
|
||||
|
||||
public static SearchJsParameter parse(@Nullable String value) {
|
||||
if (DENY_JS.value.equals(value)) return DENY_JS;
|
||||
if (REQUIRE_JS.value.equals(value)) return REQUIRE_JS;
|
||||
|
||||
return DEFAULT;
|
||||
}
|
||||
|
||||
public void addTacitTerms(SearchQuery subquery) {
|
||||
subquery.searchTermsExclude.addAll(Arrays.asList(implictExcludeSearchTerms));
|
||||
}
|
||||
}
|
@@ -0,0 +1,106 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import nu.marginalia.WebsiteUrl;
|
||||
import nu.marginalia.api.searchquery.RpcTemporalBias;
|
||||
import nu.marginalia.index.query.limit.QueryStrategy;
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.search.model.SearchProfile;
|
||||
import spark.Request;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Objects;
|
||||
|
||||
import static nu.marginalia.search.command.SearchRecentParameter.RECENT;
|
||||
|
||||
/** Immutable bundle of all user-controllable search settings for one request.
 *  The with-ers produce modified copies; note that all of them except withPage
 *  set newFilter=true, which signals that a filter was just changed by the user.
 */
public record SearchParameters(String query,
                               SearchProfile profile,
                               SearchJsParameter js,
                               SearchRecentParameter recent,
                               SearchTitleParameter searchTitle,
                               SearchAdtechParameter adtech,
                               boolean newFilter,
                               int page
) {

    /** Parses the parameters out of an incoming HTTP request; missing
     *  parameters fall back to each setting's default. */
    public SearchParameters(String queryString, Request request) {
        this(
                queryString,
                SearchProfile.getSearchProfile(request.queryParams("profile")),
                SearchJsParameter.parse(request.queryParams("js")),
                SearchRecentParameter.parse(request.queryParams("recent")),
                SearchTitleParameter.parse(request.queryParams("searchTitle")),
                SearchAdtechParameter.parse(request.queryParams("adtech")),
                "true".equals(request.queryParams("newfilter")),
                Integer.parseInt(Objects.requireNonNullElse(request.queryParams("page"), "1"))
        );
    }

    public String profileStr() {
        return profile.filterId;
    }

    public SearchParameters withProfile(SearchProfile profile) {
        return new SearchParameters(query, profile, js, recent, searchTitle, adtech, true, page);
    }

    public SearchParameters withJs(SearchJsParameter js) {
        return new SearchParameters(query, profile, js, recent, searchTitle, adtech, true, page);
    }
    public SearchParameters withAdtech(SearchAdtechParameter adtech) {
        return new SearchParameters(query, profile, js, recent, searchTitle, adtech, true, page);
    }

    public SearchParameters withRecent(SearchRecentParameter recent) {
        return new SearchParameters(query, profile, js, recent, searchTitle, adtech, true, page);
    }

    public SearchParameters withTitle(SearchTitleParameter title) {
        return new SearchParameters(query, profile, js, recent, title, adtech, true, page);
    }

    // Paging alone does not count as a new filter selection
    public SearchParameters withPage(int page) {
        return new SearchParameters(query, profile, js, recent, searchTitle, adtech, false, page);
    }

    /** Renders these parameters back into an absolute /search URL. */
    public String renderUrl(WebsiteUrl baseUrl) {
        String path = String.format("/search?query=%s&profile=%s&js=%s&adtech=%s&recent=%s&searchTitle=%s&newfilter=%s&page=%d",
                URLEncoder.encode(query, StandardCharsets.UTF_8),
                URLEncoder.encode(profile.filterId, StandardCharsets.UTF_8),
                URLEncoder.encode(js.value, StandardCharsets.UTF_8),
                URLEncoder.encode(adtech.value, StandardCharsets.UTF_8),
                URLEncoder.encode(recent.value, StandardCharsets.UTF_8),
                URLEncoder.encode(searchTitle.value, StandardCharsets.UTF_8),
                Boolean.valueOf(newFilter).toString(),
                page
        );

        return baseUrl.withPath(path);
    }

    /** Derives the temporal ranking bias: the recent toggle wins over the
     *  vintage profile's preference for old pages. */
    public RpcTemporalBias.Bias temporalBias() {
        if (recent == RECENT) {
            return RpcTemporalBias.Bias.RECENT;
        }
        else if (profile == SearchProfile.VINTAGE) {
            return RpcTemporalBias.Bias.OLD;
        }

        return RpcTemporalBias.Bias.NONE;
    }

    /** Derives the query strategy from the title-search toggle. */
    public QueryStrategy strategy() {
        if (searchTitle == SearchTitleParameter.TITLE) {
            return QueryStrategy.REQUIRE_FIELD_TITLE;
        }

        return QueryStrategy.AUTO;
    }

    /** The recent toggle imposes a year floor; otherwise the profile decides. */
    public SpecificationLimit yearLimit() {
        if (recent == RECENT)
            return SpecificationLimit.greaterThan(2018);

        return profile.getYearLimit();
    }
}
|
@@ -0,0 +1,21 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public enum SearchRecentParameter {
|
||||
DEFAULT("default"),
|
||||
RECENT("recent");
|
||||
|
||||
public final String value;
|
||||
|
||||
SearchRecentParameter(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static SearchRecentParameter parse(@Nullable String value) {
|
||||
if (RECENT.value.equals(value)) return RECENT;
|
||||
|
||||
return DEFAULT;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,21 @@
|
||||
package nu.marginalia.search.command;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public enum SearchTitleParameter {
|
||||
DEFAULT("default"),
|
||||
TITLE("title");
|
||||
|
||||
public final String value;
|
||||
|
||||
SearchTitleParameter(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public static SearchTitleParameter parse(@Nullable String value) {
|
||||
if (TITLE.value.equals(value)) return TITLE;
|
||||
|
||||
return DEFAULT;
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,104 @@
|
||||
package nu.marginalia.search.command.commands;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.search.command.SearchCommandInterface;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import nu.marginalia.search.exceptions.RedirectException;
|
||||
import spark.Response;
|
||||
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
/** Implements "bang" shortcuts in the style of DuckDuckGo: a query containing
 * e.g. "!g foo" is redirected to an external search engine instead of being
 * searched locally.  The redirect is signalled by throwing RedirectException. */
public class BangCommand implements SearchCommandInterface {
    // Maps each bang token to a String.format() pattern with a single %s slot,
    // which receives the URL-encoded remainder of the query
    private final Map<String, String> bangsToPattern = new HashMap<>();

    @Inject
    public BangCommand()
    {
        bangsToPattern.put("!g", "https://www.google.com/search?q=%s");
        bangsToPattern.put("!ddg", "https://duckduckgo.com/?q=%s");
        bangsToPattern.put("!w", "https://search.marginalia.nu/search?query=%s+site:en.wikipedia.org&profile=wiki");
    }

    /** Scans the query for any registered bang pattern.
     *
     * @return Optional.empty() when no bang was present, letting the next
     *         command in the chain handle the query
     * @throws RedirectException when a bang matched, carrying the external URL
     */
    @Override
    public Optional<Object> process(Response response, SearchParameters parameters) {

        for (var entry : bangsToPattern.entrySet()) {
            String bangPattern = entry.getKey();
            String redirectPattern = entry.getValue();

            var match = matchBangPattern(parameters.query(), bangPattern);

            if (match.isPresent()) {
                var url = String.format(redirectPattern, URLEncoder.encode(match.get(), StandardCharsets.UTF_8));
                throw new RedirectException(url);
            }
        }

        return Optional.empty();
    }

    /** If the query contains the bang pattern bangKey, return the query with the bang pattern removed. */
    Optional<String> matchBangPattern(String query, String bangKey) {
        var bm = new BangMatcher(query);

        while (bm.findNext(bangKey)) {

            // The bang must be delimited by whitespace or the start/end of
            // the string, so it does not match inside a larger token
            if (!bm.isRelativeSpaceOrInvalid(-1))
                continue;
            if (!bm.isRelativeSpaceOrInvalid(bangKey.length()))
                continue;

            String prefix = bm.prefix().trim();
            String suffix = bm.suffix(bangKey.length()).trim();

            String ret = (prefix + " " + suffix).trim();

            // A query consisting solely of the bang itself yields empty()
            return Optional.of(ret)
                    .filter(s -> !s.isBlank());
        }

        return Optional.empty();
    }

    /** Cursor-style helper for locating a bang token within the query string. */
    private static class BangMatcher {
        private final String str;
        private int pos;   // index of the current match, or -1 before the first find

        /** Returns the part of the string before the current match. */
        public String prefix() {
            return str.substring(0, pos);
        }

        /** Returns the part of the string after the current match;
         * offset is the length of the matched pattern. */
        public String suffix(int offset) {
            if (pos+offset < str.length())
                return str.substring(pos + offset);
            return "";
        }

        public BangMatcher(String str) {
            this.str = str;
            this.pos = -1;
        }

        /** Advances to the next occurrence of pattern; returns false when exhausted. */
        public boolean findNext(String pattern) {
            if (pos + 1 >= str.length())
                return false;

            return (pos = str.indexOf(pattern, pos + 1)) >= 0;
        }

        /** True if the character at the given offset relative to the current
         * match position is a space, or lies outside the string entirely
         * (string boundaries count as valid delimiters). */
        public boolean isRelativeSpaceOrInvalid(int offset) {
            if (offset + pos < 0)
                return true;
            if (offset + pos >= str.length())
                return true;

            return Character.isSpaceChar(str.charAt(offset + pos));
        }

    }

}
|
@@ -0,0 +1,36 @@
|
||||
package nu.marginalia.search.command.commands;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.renderer.MustacheRenderer;
|
||||
import nu.marginalia.renderer.RendererFactory;
|
||||
import nu.marginalia.search.command.SearchCommandInterface;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import nu.marginalia.search.svc.SearchUnitConversionService;
|
||||
import spark.Response;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
public class ConvertCommand implements SearchCommandInterface {
|
||||
private final SearchUnitConversionService searchUnitConversionService;
|
||||
private final MustacheRenderer<Map<String, String>> conversionRenderer;
|
||||
|
||||
@Inject
|
||||
public ConvertCommand(SearchUnitConversionService searchUnitConversionService, RendererFactory rendererFactory) throws IOException {
|
||||
this.searchUnitConversionService = searchUnitConversionService;
|
||||
|
||||
conversionRenderer = rendererFactory.renderer("search/conversion-results");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Object> process(Response response, SearchParameters parameters) {
|
||||
var conversion = searchUnitConversionService.tryConversion(parameters.query());
|
||||
return conversion.map(s -> conversionRenderer.render(Map.of(
|
||||
"query", parameters.query(),
|
||||
"result", s,
|
||||
"profile", parameters.profileStr())
|
||||
));
|
||||
|
||||
}
|
||||
}
|
@@ -0,0 +1,70 @@
|
||||
|
||||
package nu.marginalia.search.command.commands;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.api.math.MathClient;
|
||||
import nu.marginalia.api.math.model.DictionaryResponse;
|
||||
import nu.marginalia.renderer.MustacheRenderer;
|
||||
import nu.marginalia.search.command.SearchCommandInterface;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import nu.marginalia.renderer.RendererFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import spark.Response;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class DefinitionCommand implements SearchCommandInterface {
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private final MustacheRenderer<DictionaryResponse> dictionaryRenderer;
|
||||
private final MathClient mathClient;
|
||||
|
||||
|
||||
private final Predicate<String> queryPatternPredicate = Pattern.compile("^define:[A-Za-z\\s-0-9]+$").asPredicate();
|
||||
|
||||
@Inject
|
||||
public DefinitionCommand(RendererFactory rendererFactory, MathClient mathClient)
|
||||
throws IOException
|
||||
{
|
||||
|
||||
dictionaryRenderer = rendererFactory.renderer("search/dictionary-results");
|
||||
this.mathClient = mathClient;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Object> process(Response response, SearchParameters parameters) {
|
||||
if (!queryPatternPredicate.test(parameters.query())) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
var results = lookupDefinition(parameters.query());
|
||||
|
||||
return Optional.of(dictionaryRenderer.render(results,
|
||||
Map.of("query", parameters.query(),
|
||||
"profile", parameters.profileStr())
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
private DictionaryResponse lookupDefinition(String humanQuery) {
|
||||
String definePrefix = "define:";
|
||||
String word = humanQuery.substring(definePrefix.length()).toLowerCase();
|
||||
|
||||
try {
|
||||
return mathClient
|
||||
.dictionaryLookup(word)
|
||||
.get(250, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
catch (Exception e) {
|
||||
logger.error("Failed to lookup definition for word: " + word, e);
|
||||
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,39 @@
|
||||
package nu.marginalia.search.command.commands;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.renderer.MustacheRenderer;
|
||||
import nu.marginalia.renderer.RendererFactory;
|
||||
import nu.marginalia.search.SearchOperator;
|
||||
import nu.marginalia.search.command.SearchCommandInterface;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import nu.marginalia.search.model.DecoratedSearchResults;
|
||||
import spark.Response;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
public class SearchCommand implements SearchCommandInterface {
|
||||
private final SearchOperator searchOperator;
|
||||
private final MustacheRenderer<DecoratedSearchResults> searchResultsRenderer;
|
||||
|
||||
|
||||
@Inject
|
||||
public SearchCommand(SearchOperator searchOperator,
|
||||
RendererFactory rendererFactory) throws IOException {
|
||||
this.searchOperator = searchOperator;
|
||||
|
||||
searchResultsRenderer = rendererFactory.renderer("search/search-results");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Object> process(Response response, SearchParameters parameters) {
|
||||
try {
|
||||
DecoratedSearchResults results = searchOperator.doSearch(parameters);
|
||||
return Optional.of(searchResultsRenderer.render(results));
|
||||
}
|
||||
catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,50 @@
|
||||
package nu.marginalia.search.command.commands;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.search.command.SearchCommandInterface;
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import spark.Response;
|
||||
|
||||
import java.util.Optional;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class SiteRedirectCommand implements SearchCommandInterface {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
|
||||
private final Predicate<String> queryPatternPredicate = Pattern.compile("^(site|links):[.A-Za-z\\-0-9]+$").asPredicate();
|
||||
|
||||
@Inject
|
||||
public SiteRedirectCommand() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<Object> process(Response response, SearchParameters parameters) {
|
||||
if (!queryPatternPredicate.test(parameters.query())) {
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
int idx = parameters.query().indexOf(':');
|
||||
String prefix = parameters.query().substring(0, idx);
|
||||
String domain = parameters.query().substring(idx + 1).toLowerCase();
|
||||
|
||||
// Use an HTML redirect here, so we can use relative URLs
|
||||
String view = switch (prefix) {
|
||||
case "links" -> "links";
|
||||
default -> "info";
|
||||
};
|
||||
|
||||
return Optional.of("""
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<meta charset="UTF-8">
|
||||
<title>Redirecting...</title>
|
||||
<meta http-equiv="refresh" content="0; url=/site/%s?view=%s">
|
||||
""".formatted(domain, view)
|
||||
);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,66 @@
|
||||
package nu.marginalia.search.db;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
|
||||
import java.sql.ResultSet;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/** Database query for finding domains adjacent to a given domain via the
 * EC_DOMAIN_NEIGHBORS_2 table. */
public class DbNearDomainsQuery {

    private final HikariDataSource dataSource;

    @Inject
    public DbNearDomainsQuery(HikariDataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Returns the id of the domain named 'term' followed by the ids of its
     * indexed neighbors in a servable state.
     *
     * @param term      the domain name to look up
     * @param onProblem callback invoked with a human-readable message when
     *                  no matching domains were found
     * @return the matching domain ids; empty when the domain is unknown
     * @throws RuntimeException wrapping any SQL failure
     */
    public List<Integer> getRelatedDomains(String term, Consumer<String> onProblem) {
        List<Integer> ret = new ArrayList<>();
        try (var conn = dataSource.getConnection();

             var selfStmt = conn.prepareStatement("""
                SELECT ID FROM EC_DOMAIN WHERE DOMAIN_NAME=?
                """);
             var stmt = conn.prepareStatement("""
                SELECT NEIGHBOR_ID, ND.INDEXED, ND.STATE FROM EC_DOMAIN_NEIGHBORS_2
                INNER JOIN EC_DOMAIN ND ON ND.ID=NEIGHBOR_ID
                WHERE DOMAIN_ID=?
                """)) {
            ResultSet rsp;
            // First resolve the domain name to its numeric id
            selfStmt.setString(1, term);
            rsp = selfStmt.executeQuery();
            int domainId = -1;
            if (rsp.next()) {
                domainId = rsp.getInt(1);
                ret.add(domainId);
            }

            // When the name was unknown, domainId stays -1 and the neighbor
            // query simply returns no rows
            stmt.setInt(1, domainId);
            rsp = stmt.executeQuery();

            while (rsp.next()) {
                int id = rsp.getInt(1);
                int indexed = rsp.getInt(2);
                String state = rsp.getString(3);

                // Only suggest neighbors that are indexed and in one of the
                // states listed here
                if (indexed > 0 && ("ACTIVE".equalsIgnoreCase(state) || "SOCIAL_MEDIA".equalsIgnoreCase(state) || "SPECIAL".equalsIgnoreCase(state))) {
                    ret.add(id);
                }
            }

        }
        catch (Exception ex) {
            throw new RuntimeException(ex);
        }

        if (ret.isEmpty()) {
            onProblem.accept("Could not find domains adjacent " + term);
        }

        return ret;
    }

}
|
@@ -0,0 +1,102 @@
|
||||
package nu.marginalia.search.model;
|
||||
|
||||
import nu.marginalia.model.EdgeDomain;
|
||||
import nu.marginalia.model.idx.WordFlags;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
/** A class to hold a list of UrlDetails, grouped by domain, where the first one is the main result
 * and the rest are additional results, for summary display. */
public class ClusteredUrlDetails implements Comparable<ClusteredUrlDetails> {

    /** The best result of the cluster, shown in full. */
    @NotNull
    public final UrlDetails first;

    /** The remaining results from the same domain, in descending quality order. */
    @NotNull
    public final List<UrlDetails> rest;

    /** Create a new ClusteredUrlDetails from a collection of UrlDetails,
     * with the best result as "first", and the others, in descending order
     * of quality as the "rest"...
     *
     * @param details A collection of UrlDetails, which must not be empty.
     */
    public ClusteredUrlDetails(Collection<UrlDetails> details) {
        var items = new ArrayList<>(details);

        // Natural order is defined by UrlDetails::compareTo (not visible here);
        // the first element after sorting is taken as the best result
        items.sort(Comparator.naturalOrder());

        if (items.isEmpty())
            throw new IllegalArgumentException("Empty list of details");

        this.first = items.removeFirst();
        this.rest = items;

        double bestScore = first.termScore;
        double scoreLimit = Math.min(4.0, bestScore * 1.25);

        // NOTE(review): entries with termScore <= scoreLimit are dropped
        // unless a non-special keyword matched in the title, link text, URL
        // or subject -- confirm the intended direction of this comparison
        // against UrlDetails::compareTo, which is defined elsewhere
        this.rest.removeIf(urlDetail -> {
            if (urlDetail.termScore > scoreLimit)
                return false;

            for (var keywordScore : urlDetail.resultItem.keywordScores) {
                if (keywordScore.isKeywordSpecial())
                    continue;
                if (keywordScore.hasTermFlag(WordFlags.Title))
                    return false;
                if (keywordScore.hasTermFlag(WordFlags.ExternalLink))
                    return false;
                if (keywordScore.hasTermFlag(WordFlags.UrlDomain))
                    return false;
                if (keywordScore.hasTermFlag(WordFlags.UrlPath))
                    return false;
                if (keywordScore.hasTermFlag(WordFlags.Subjects))
                    return false;
            }

            return true;
        });

    }

    /** Creates a single-result cluster with no additional results. */
    public ClusteredUrlDetails(@NotNull UrlDetails onlyFirst) {
        this.first = onlyFirst;
        this.rest = Collections.emptyList();
    }

    // For renderer use, do not remove
    public @NotNull UrlDetails getFirst() {
        return first;
    }

    // For renderer use, do not remove
    public @NotNull List<UrlDetails> getRest() {
        return rest;
    }

    /** The domain shared by all results in this cluster. */
    public EdgeDomain getDomain() {
        return first.url.getDomain();
    }

    public boolean hasMultiple() {
        return !rest.isEmpty();
    }

    /** Returns the total number of results from the same domain,
     * including such results that are not included here. */
    public int totalCount() {
        return first.resultsFromSameDomain;
    }

    /** Number of same-domain results not displayed in this cluster. */
    public int remainingCount() {
        return totalCount() - 1 - rest.size();
    }

    /** Orders clusters by their best result. */
    @Override
    public int compareTo(@NotNull ClusteredUrlDetails o) {
        return Objects.compare(first, o.first, UrlDetails::compareTo);
    }
}
|
@@ -0,0 +1,186 @@
|
||||
package nu.marginalia.search.model;
|
||||
|
||||
import nu.marginalia.search.command.SearchParameters;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * A class to hold details about the search results,
 * as used by the handlebars templating engine to render
 * the search results page.
 */
public class DecoratedSearchResults {
    private final SearchParameters params;
    private final List<String> problems;
    private final String evalResult;

    public DecoratedSearchResults(SearchParameters params,
                                  List<String> problems,
                                  String evalResult,
                                  List<ClusteredUrlDetails> results,
                                  String focusDomain,
                                  int focusDomainId,
                                  SearchFilters filters,
                                  List<Page> resultPages) {
        this.params = params;
        this.problems = problems;
        this.evalResult = evalResult;
        this.results = results;
        this.focusDomain = focusDomain;
        this.focusDomainId = focusDomainId;
        this.filters = filters;
        this.resultPages = resultPages;
    }

    // The search results, clustered by domain
    public final List<ClusteredUrlDetails> results;

    public static DecoratedSearchResultsBuilder builder() {
        return new DecoratedSearchResultsBuilder();
    }

    public SearchParameters getParams() {
        return params;
    }

    public List<String> getProblems() {
        return problems;
    }

    public String getEvalResult() {
        return evalResult;
    }

    public List<ClusteredUrlDetails> getResults() {
        return results;
    }

    public String getFocusDomain() {
        return focusDomain;
    }

    public int getFocusDomainId() {
        return focusDomainId;
    }

    public SearchFilters getFilters() {
        return filters;
    }

    public List<Page> getResultPages() {
        return resultPages;
    }

    // Domain the search was restricted to, if any
    private final String focusDomain;
    private final int focusDomainId;
    private final SearchFilters filters;

    // Pagination model for the results page
    private final List<Page> resultPages;

    public boolean isMultipage() {
        return resultPages.size() > 1;
    }

    /** One entry in the pagination bar. */
    public record Page(int number, boolean current, String href) {
    }

    // These are used by the search form, they look unused in the IDE but are used by the mustache template,
    // DO NOT REMOVE THEM
    public int getResultCount() {
        return results.size();
    }

    public String getQuery() {
        return params.query();
    }

    public String getProfile() {
        return params.profile().filterId;
    }

    public String getJs() {
        return params.js().value;
    }

    public String getAdtech() {
        return params.adtech().value;
    }

    public String getRecent() {
        return params.recent().value;
    }

    public String getSearchTitle() {
        return params.searchTitle().value;
    }

    public int page() {
        return params.page();
    }

    public Boolean isNewFilter() {
        return params.newFilter();
    }


    /** Fluent builder for DecoratedSearchResults; all fields default to
     * null/0 unless set. */
    public static class DecoratedSearchResultsBuilder {
        private SearchParameters params;
        private List<String> problems;
        private String evalResult;
        private List<ClusteredUrlDetails> results;
        private String focusDomain;
        private int focusDomainId;
        private SearchFilters filters;
        private List<Page> resultPages;

        DecoratedSearchResultsBuilder() {
        }

        public DecoratedSearchResultsBuilder params(SearchParameters params) {
            this.params = params;
            return this;
        }

        public DecoratedSearchResultsBuilder problems(List<String> problems) {
            this.problems = problems;
            return this;
        }

        public DecoratedSearchResultsBuilder evalResult(String evalResult) {
            this.evalResult = evalResult;
            return this;
        }

        public DecoratedSearchResultsBuilder results(List<ClusteredUrlDetails> results) {
            this.results = results;
            return this;
        }

        public DecoratedSearchResultsBuilder focusDomain(String focusDomain) {
            this.focusDomain = focusDomain;
            return this;
        }

        public DecoratedSearchResultsBuilder focusDomainId(int focusDomainId) {
            this.focusDomainId = focusDomainId;
            return this;
        }

        public DecoratedSearchResultsBuilder filters(SearchFilters filters) {
            this.filters = filters;
            return this;
        }

        public DecoratedSearchResultsBuilder resultPages(List<Page> resultPages) {
            this.resultPages = resultPages;
            return this;
        }

        public DecoratedSearchResults build() {
            return new DecoratedSearchResults(this.params, this.problems, this.evalResult, this.results, this.focusDomain, this.focusDomainId, this.filters, this.resultPages);
        }

        public String toString() {
            return "DecoratedSearchResults.DecoratedSearchResultsBuilder(params=" + this.params + ", problems=" + this.problems + ", evalResult=" + this.evalResult + ", results=" + this.results + ", focusDomain=" + this.focusDomain + ", focusDomainId=" + this.focusDomainId + ", filters=" + this.filters + ", resultPages=" + this.resultPages + ")";
        }
    }
}
|
@@ -0,0 +1,223 @@
|
||||
package nu.marginalia.search.model;
|
||||
|
||||
import nu.marginalia.WebsiteUrl;
|
||||
import nu.marginalia.search.command.*;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/** Models the search filters displayed next to the search results */
public class SearchFilters {
    // Base URL of the search service, used when rendering filter links
    private final WebsiteUrl url;

    // filterId of the currently selected profile
    public final String currentFilter;

    // These are necessary for the renderer to access the data
    public final RemoveJsOption removeJsOption;
    public final ReduceAdtechOption reduceAdtechOption;
    public final ShowRecentOption showRecentOption;
    public final SearchTitleOption searchTitleOption;

    // Profile filters, grouped into the rows shown in the sidebar
    public final List<List<Filter>> filterGroups;

    // Getters are for the renderer to access the data


    public String getCurrentFilter() {
        return currentFilter;
    }

    public RemoveJsOption getRemoveJsOption() {
        return removeJsOption;
    }

    public ReduceAdtechOption getReduceAdtechOption() {
        return reduceAdtechOption;
    }

    public ShowRecentOption getShowRecentOption() {
        return showRecentOption;
    }

    public SearchTitleOption getSearchTitleOption() {
        return searchTitleOption;
    }

    public List<List<Filter>> getFilterGroups() {
        return filterGroups;
    }

    public SearchFilters(WebsiteUrl url, SearchParameters parameters) {
        this.url = url;

        removeJsOption = new RemoveJsOption(parameters);
        reduceAdtechOption = new ReduceAdtechOption(parameters);
        showRecentOption = new ShowRecentOption(parameters);
        searchTitleOption = new SearchTitleOption(parameters);


        currentFilter = parameters.profile().filterId;

        filterGroups = List.of(
                List.of(
                        new Filter("No Filter", SearchProfile.NO_FILTER, parameters),
                        // new Filter("Popular", SearchProfile.POPULAR, parameters),
                        new Filter("Small Web", SearchProfile.SMALLWEB, parameters),
                        new Filter("Blogosphere", SearchProfile.BLOGOSPHERE, parameters),
                        new Filter("Academia", SearchProfile.ACADEMIA, parameters)
                ),
                List.of(
                        new Filter("Vintage", SearchProfile.VINTAGE, parameters),
                        new Filter("Plain Text", SearchProfile.PLAIN_TEXT, parameters),
                        new Filter("~tilde", SearchProfile.TILDE, parameters)
                ),
                List.of(
                        new Filter("Wiki", SearchProfile.WIKI, parameters),
                        new Filter("Forum", SearchProfile.FORUM, parameters),
                        new Filter("Docs", SearchProfile.DOCS, parameters),
                        new Filter("Recipes", SearchProfile.FOOD, parameters)
                )
        );


    }

    /** Toggle for the "remove javascript" filter; its url flips the option
     * relative to the current parameters. */
    public class RemoveJsOption {
        private final SearchJsParameter value;

        public final String url;
        public String getUrl() {
            return url;
        }

        public boolean isSet() {
            return value.equals(SearchJsParameter.DENY_JS);
        }

        public String name() {
            return "Remove Javascript";
        }

        public RemoveJsOption(SearchParameters parameters) {
            this.value = parameters.js();

            // The link toggles the option: clicking it while set unsets it
            var toggledValue = switch (parameters.js()) {
                case DENY_JS -> SearchJsParameter.DEFAULT;
                default -> SearchJsParameter.DENY_JS;
            };

            this.url = parameters.withJs(toggledValue).renderUrl(SearchFilters.this.url);
        }
    }

    /** Toggle for the "reduce adtech" filter. */
    public class ReduceAdtechOption {
        private final SearchAdtechParameter value;

        public final String url;
        public String getUrl() {
            return url;
        }

        public boolean isSet() {
            return value.equals(SearchAdtechParameter.REDUCE);
        }

        public String name() {
            return "Reduce Adtech";
        }

        public ReduceAdtechOption(SearchParameters parameters) {
            this.value = parameters.adtech();

            // The link toggles the option: clicking it while set unsets it
            var toggledValue = switch (parameters.adtech()) {
                case REDUCE -> SearchAdtechParameter.DEFAULT;
                default -> SearchAdtechParameter.REDUCE;
            };

            this.url = parameters.withAdtech(toggledValue).renderUrl(SearchFilters.this.url);
        }
    }

    /** Toggle for the "recent results" filter. */
    public class ShowRecentOption {
        private final SearchRecentParameter value;

        public final String url;
        public String getUrl() {
            return url;
        }

        public boolean isSet() {
            return value.equals(SearchRecentParameter.RECENT);
        }

        public String name() {
            return "Recent Results";
        }

        public ShowRecentOption(SearchParameters parameters) {
            this.value = parameters.recent();

            // The link toggles the option: clicking it while set unsets it
            var toggledValue = switch (parameters.recent()) {
                case RECENT -> SearchRecentParameter.DEFAULT;
                default -> SearchRecentParameter.RECENT;
            };

            this.url = parameters.withRecent(toggledValue).renderUrl(SearchFilters.this.url);
        }
    }

    /** Toggle for the "search in title" filter. */
    public class SearchTitleOption {
        private final SearchTitleParameter value;

        public final String url;
        public String getUrl() {
            return url;
        }

        public boolean isSet() {
            return value.equals(SearchTitleParameter.TITLE);
        }

        public String name() {
            return "Search In Title";
        }

        public SearchTitleOption(SearchParameters parameters) {
            this.value = parameters.searchTitle();

            // The link toggles the option: clicking it while set unsets it
            var toggledValue = switch (parameters.searchTitle()) {
                case TITLE -> SearchTitleParameter.DEFAULT;
                default -> SearchTitleParameter.TITLE;
            };

            this.url = parameters.withTitle(toggledValue).renderUrl(SearchFilters.this.url);
        }
    }

    /** One profile filter entry in the sidebar; selecting it switches the
     * search to the given profile. */
    public class Filter {
        public final SearchProfile profile;

        public final String displayName;
        public final boolean current;
        public final String url;

        public Filter(String displayName, SearchProfile profile, SearchParameters parameters) {
            this.displayName = displayName;
            this.profile = profile;
            this.current = profile.equals(parameters.profile());

            this.url = parameters.withProfile(profile).renderUrl(SearchFilters.this.url);
        }

        public String getDisplayName() {
            return displayName;
        }

        public boolean isCurrent() {
            return current;
        }

        public String getUrl() {
            return url;
        }
    }
}
|
@@ -0,0 +1,105 @@
|
||||
package nu.marginalia.search.model;
|
||||
|
||||
import nu.marginalia.index.query.limit.SpecificationLimit;
|
||||
import nu.marginalia.model.crawl.HtmlFeature;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchQuery;
|
||||
import nu.marginalia.api.searchquery.model.query.SearchSetIdentifier;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
public enum SearchProfile {
|
||||
POPULAR("default", SearchSetIdentifier.POPULAR),
|
||||
SMALLWEB("modern", SearchSetIdentifier.SMALLWEB),
|
||||
BLOGOSPHERE("blogosphere", SearchSetIdentifier.BLOGS),
|
||||
NO_FILTER("corpo", SearchSetIdentifier.NONE),
|
||||
VINTAGE("vintage", SearchSetIdentifier.NONE),
|
||||
TILDE("tilde", SearchSetIdentifier.NONE),
|
||||
CORPO_CLEAN("corpo-clean", SearchSetIdentifier.NONE),
|
||||
ACADEMIA("academia", SearchSetIdentifier.NONE),
|
||||
PLAIN_TEXT("plain-text", SearchSetIdentifier.NONE),
|
||||
FOOD("food", SearchSetIdentifier.POPULAR),
|
||||
FORUM("forum", SearchSetIdentifier.NONE),
|
||||
WIKI("wiki", SearchSetIdentifier.NONE),
|
||||
DOCS("docs", SearchSetIdentifier.NONE),
|
||||
;
|
||||
|
||||
|
||||
public final String filterId;
|
||||
public final SearchSetIdentifier searchSetIdentifier;
|
||||
|
||||
SearchProfile(String filterId, SearchSetIdentifier searchSetIdentifier) {
|
||||
this.filterId = filterId;
|
||||
this.searchSetIdentifier = searchSetIdentifier;
|
||||
}
|
||||
|
||||
private final static SearchProfile[] values = values();
|
||||
public static SearchProfile getSearchProfile(String param) {
|
||||
if (null == param) {
|
||||
return NO_FILTER;
|
||||
}
|
||||
|
||||
for (var profile : values) {
|
||||
if (Objects.equals(profile.filterId, param)) {
|
||||
return profile;
|
||||
}
|
||||
}
|
||||
|
||||
return NO_FILTER;
|
||||
}
|
||||
|
||||
public void addTacitTerms(SearchQuery subquery) {
|
||||
if (this == ACADEMIA) {
|
||||
subquery.searchTermsAdvice.add("special:academia");
|
||||
}
|
||||
if (this == VINTAGE) {
|
||||
subquery.searchTermsPriority.add("format:html123");
|
||||
subquery.searchTermsPriority.add("js:false");
|
||||
}
|
||||
if (this == TILDE) {
|
||||
subquery.searchTermsAdvice.add("special:tilde");
|
||||
}
|
||||
if (this == PLAIN_TEXT) {
|
||||
subquery.searchTermsAdvice.add("format:plain");
|
||||
}
|
||||
if (this == WIKI) {
|
||||
subquery.searchTermsAdvice.add("generator:wiki");
|
||||
}
|
||||
if (this == FORUM) {
|
||||
subquery.searchTermsAdvice.add("generator:forum");
|
||||
}
|
||||
if (this == DOCS) {
|
||||
subquery.searchTermsAdvice.add("generator:docs");
|
||||
}
|
||||
if (this == FOOD) {
|
||||
subquery.searchTermsAdvice.add(HtmlFeature.CATEGORY_FOOD.getKeyword());
|
||||
subquery.searchTermsExclude.add("special:ads");
|
||||
}
|
||||
}
|
||||
|
||||
public SpecificationLimit getYearLimit() {
|
||||
if (this == SMALLWEB) {
|
||||
return SpecificationLimit.greaterThan(2015);
|
||||
}
|
||||
if (this == VINTAGE) {
|
||||
return SpecificationLimit.lessThan(2003);
|
||||
}
|
||||
else return SpecificationLimit.none();
|
||||
}
|
||||
|
||||
public SpecificationLimit getSizeLimit() {
|
||||
if (this == SMALLWEB) {
|
||||
return SpecificationLimit.lessThan(500);
|
||||
}
|
||||
else return SpecificationLimit.none();
|
||||
}
|
||||
|
||||
|
||||
public SpecificationLimit getQualityLimit() {
|
||||
if (this == SMALLWEB) {
|
||||
return SpecificationLimit.lessThan(5);
|
||||
}
|
||||
else return SpecificationLimit.none();
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -0,0 +1,293 @@
|
||||
package nu.marginalia.search.model;

import nu.marginalia.api.searchquery.model.results.SearchResultItem;
import nu.marginalia.api.searchquery.model.results.SearchResultKeywordScore;
import nu.marginalia.model.EdgeUrl;
import nu.marginalia.model.crawl.DomainIndexingState;
import nu.marginalia.model.crawl.HtmlFeature;

import java.util.ArrayList;
import java.util.List;

/**
 * A class to hold details about a single search result.
 */
public class UrlDetails implements Comparable<UrlDetails> {
    // Document identifier; also the basis for equals/hashCode/compareTo tie-breaking
    public long id;
    // Identifier of the domain the document belongs to
    public int domainId;

    public EdgeUrl url;
    public String title;
    public String description;

    // Document format code, e.g. "HTML5" or "PLAIN"; may be null (see getFormat)
    public String format;
    // Bit set of HtmlFeature flags detected for the document
    public int features;

    public DomainIndexingState domainState;

    // Relevance score; lower is better (see getMatchRank)
    public double termScore;

    // Number of results in the current result set that share this result's domain
    public int resultsFromSameDomain;

    // Serialized keyword position information -- format defined elsewhere; TODO confirm
    public String positions;
    public int positionsCount;
    public SearchResultItem resultItem;
    public List<SearchResultKeywordScore> keywordScores;

    public UrlDetails(long id, int domainId, EdgeUrl url, String title, String description, String format, int features, DomainIndexingState domainState, double termScore, int resultsFromSameDomain, String positions, int positionsCount, SearchResultItem resultItem, List<SearchResultKeywordScore> keywordScores) {
        this.id = id;
        this.domainId = domainId;
        this.url = url;
        this.title = title;
        this.description = description;
        this.format = format;
        this.features = features;
        this.domainState = domainState;
        this.termScore = termScore;
        this.resultsFromSameDomain = resultsFromSameDomain;
        this.positions = positions;
        this.positionsCount = positionsCount;
        this.resultItem = resultItem;
        this.keywordScores = keywordScores;
    }

    public UrlDetails() {
    }

    /** Returns true if the same domain contributed more than one result to the result set. */
    public boolean hasMoreResults() {
        return resultsFromSameDomain > 1;
    }

    /** Returns a human-readable name for the document's format code, or "?" when
     * the format is null or unrecognized. */
    public String getFormat() {
        if (null == format) {
            return "?";
        }
        switch (format) {
            case "HTML123":
                return "HTML 1-3";
            case "HTML4":
                return "HTML 4";
            case "XHTML":
                return "XHTML";
            case "HTML5":
                return "HTML 5";
            case "PLAIN":
                return "Plain Text";
            default:
                return "?";
        }
    }

    // Consistent with equals(): both are based solely on the document id
    public int hashCode() {
        return Long.hashCode(id);
    }

    /** Orders by ascending term score (better results first), breaking ties by id.
     * Consistent with equals(), since ties are broken on the same id field. */
    @Override
    public int compareTo(UrlDetails other) {
        int result = Double.compare(getTermScore(), other.getTermScore());
        if (result == 0) result = Long.compare(getId(), other.getId());
        return result;
    }

    /** Equality is determined by the document id alone. */
    public boolean equals(Object other) {
        if (other == null) {
            return false;
        }
        if (other == this) {
            return true;
        }
        if (other instanceof UrlDetails) {
            return ((UrlDetails) other).id == id;
        }
        return false;
    }

    /** Returns the title, substituting the URL string when the title is missing or blank. */
    public String getTitle() {
        if (title == null || title.isBlank()) {
            return url.toString();
        }
        return title;
    }

    public boolean isPlainText() {
        return "PLAIN".equals(format);
    }

    /** Counts how many "problem" feature bits are set on the document.
     * NOTE(review): the mask also includes ADVERTISEMENT, which getProblems()
     * does not report, so this count may exceed getProblems().size() -- confirm
     * whether that asymmetry is intentional. */
    public int getProblemCount() {
        int mask = HtmlFeature.JS.getFeatureBit()
                | HtmlFeature.COOKIES.getFeatureBit()
                | HtmlFeature.TRACKING.getFeatureBit()
                | HtmlFeature.AFFILIATE_LINK.getFeatureBit()
                | HtmlFeature.TRACKING_ADTECH.getFeatureBit()
                | HtmlFeature.ADVERTISEMENT.getFeatureBit();

        return Integer.bitCount(features & mask);
    }

    /** Returns short badge/tooltip descriptions for each problem feature detected
     * on the document (Javascript, cookies, tracking, affiliate links, adtech). */
    public List<UrlProblem> getProblems() {
        List<UrlProblem> problems = new ArrayList<>();

        if (isScripts()) {
            problems.add(new UrlProblem("Js", "The page uses Javascript"));
        }
        if (isCookies()) {
            problems.add(new UrlProblem("Co", "The page uses Cookies"));
        }
        if (isTracking()) {
            problems.add(new UrlProblem("Tr", "The page uses Tracking/Analytics"));
        }
        if (isAffiliate()) {
            problems.add(new UrlProblem("Af", "The page may use Affiliate Linking"));
        }
        if (isAds()) {
            problems.add(new UrlProblem("Ad", "The page uses Ads/Adtech Tracking"));
        }
        return problems;

    }

    public boolean isScripts() {
        return HtmlFeature.hasFeature(features, HtmlFeature.JS);
    }

    public boolean isTracking() {
        return HtmlFeature.hasFeature(features, HtmlFeature.TRACKING);
    }

    public boolean isAffiliate() {
        return HtmlFeature.hasFeature(features, HtmlFeature.AFFILIATE_LINK);
    }

    public boolean isMedia() {
        return HtmlFeature.hasFeature(features, HtmlFeature.MEDIA);
    }

    public boolean isCookies() {
        return HtmlFeature.hasFeature(features, HtmlFeature.COOKIES);
    }

    public boolean isAds() {
        return HtmlFeature.hasFeature(features, HtmlFeature.TRACKING_ADTECH);
    }

    /** Buckets the term score into a coarse rank (1, 2, 3, 5 or 10); lower is better. */
    public int getMatchRank() {
        if (termScore <= 1) return 1;
        if (termScore <= 2) return 2;
        if (termScore <= 3) return 3;
        if (termScore <= 5) return 5;

        return 10;
    }

    public long getId() {
        return this.id;
    }

    public int getDomainId() {
        return this.domainId;
    }

    public EdgeUrl getUrl() {
        return this.url;
    }

    public String getDescription() {
        return this.description;
    }

    public int getFeatures() {
        return this.features;
    }

    public DomainIndexingState getDomainState() {
        return this.domainState;
    }

    public double getTermScore() {
        return this.termScore;
    }

    public int getResultsFromSameDomain() {
        return this.resultsFromSameDomain;
    }

    public String getPositions() {
        return this.positions;
    }

    public int getPositionsCount() {
        return this.positionsCount;
    }

    public SearchResultItem getResultItem() {
        return this.resultItem;
    }

    public List<SearchResultKeywordScore> getKeywordScores() {
        return this.keywordScores;
    }

    // Lombok-style wither methods: each returns a copy with one field replaced.
    // The reference (==) comparisons are an intentional no-copy optimization when
    // the new value is identical to the current one.
    public UrlDetails withId(long id) {
        return this.id == id ? this : new UrlDetails(id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withDomainId(int domainId) {
        return this.domainId == domainId ? this : new UrlDetails(this.id, domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withUrl(EdgeUrl url) {
        return this.url == url ? this : new UrlDetails(this.id, this.domainId, url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withTitle(String title) {
        return this.title == title ? this : new UrlDetails(this.id, this.domainId, this.url, title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withDescription(String description) {
        return this.description == description ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withFormat(String format) {
        return this.format == format ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withFeatures(int features) {
        return this.features == features ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withDomainState(DomainIndexingState domainState) {
        return this.domainState == domainState ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withTermScore(double termScore) {
        return this.termScore == termScore ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withResultsFromSameDomain(int resultsFromSameDomain) {
        return this.resultsFromSameDomain == resultsFromSameDomain ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withPositions(String positions) {
        return this.positions == positions ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, positions, this.positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withPositionsCount(int positionsCount) {
        return this.positionsCount == positionsCount ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, positionsCount, this.resultItem, this.keywordScores);
    }

    public UrlDetails withResultItem(SearchResultItem resultItem) {
        return this.resultItem == resultItem ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, resultItem, this.keywordScores);
    }

    public UrlDetails withKeywordScores(List<SearchResultKeywordScore> keywordScores) {
        return this.keywordScores == keywordScores ? this : new UrlDetails(this.id, this.domainId, this.url, this.title, this.description, this.format, this.features, this.domainState, this.termScore, this.resultsFromSameDomain, this.positions, this.positionsCount, this.resultItem, keywordScores);
    }

    public String toString() {
        return "UrlDetails(id=" + this.getId() + ", domainId=" + this.getDomainId() + ", url=" + this.getUrl() + ", title=" + this.getTitle() + ", description=" + this.getDescription() + ", format=" + this.getFormat() + ", features=" + this.getFeatures() + ", domainState=" + this.getDomainState() + ", termScore=" + this.getTermScore() + ", resultsFromSameDomain=" + this.getResultsFromSameDomain() + ", positions=" + this.getPositions() + ", positionsCount=" + this.getPositionsCount() + ", resultItem=" + this.getResultItem() + ", keywordScores=" + this.getKeywordScores() + ")";
    }

    /** A short badge name and a human-readable description of a problem feature. */
    public static record UrlProblem(String name, String description) {

    }
}
|
@@ -0,0 +1,27 @@
|
||||
package nu.marginalia.search.results;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import nu.marginalia.browse.model.BrowseResult;
import nu.marginalia.screenshot.ScreenshotService;

import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;

/**
 * Filters browse results: a result is dropped when no screenshot is available
 * for its domain, or when its domain hash duplicates an earlier result's.
 */
@Singleton
public class BrowseResultCleaner {
    private final ScreenshotService screenshotService;

    @Inject
    public BrowseResultCleaner(ScreenshotService screenshotService) {
        this.screenshotService = screenshotService;
    }

    /**
     * Returns a stateful predicate suitable for {@code List.removeIf}.  The
     * predicate remembers which domain hashes it has seen, so a fresh one must
     * be created for each result set.
     */
    public Predicate<BrowseResult> shouldRemoveResultPredicateBr() {
        Set<String> seenDomainHashes = new HashSet<>(100);

        return result -> {
            // Drop results we can't render a screenshot for
            if (!screenshotService.hasScreenshot(result.domainId()))
                return true;

            // Drop duplicates by domain hash; note the hash is only recorded
            // for results that had a screenshot, matching the original
            // short-circuit behavior
            return !seenDomainHashes.add(result.domainHash());
        };
    }
}
|
@@ -0,0 +1,69 @@
|
||||
package nu.marginalia.search.results;

import gnu.trove.list.TLongList;
import gnu.trove.list.array.TLongArrayList;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.set.hash.TIntHashSet;
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
import nu.marginalia.lsh.EasyLSH;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Objects;

/**
 * Stateful filter that removes duplicate and near-duplicate search results,
 * and caps the number of results retained per domain key.
 * <p>
 * One instance is intended to process a single result set, as state
 * accumulates with each call.  Not thread-safe.
 */
public class UrlDeduplicator {
    /** Maximum hamming distance between LSH data hashes for two documents
     *  to be considered near-duplicates of each other. */
    private static final int LSH_SIMILARITY_THRESHOLD = 2;
    private static final Logger logger = LoggerFactory.getLogger(UrlDeduplicator.class);

    /** Hashes of (url path, title) pairs seen so far. */
    private final TIntHashSet seenSuperficialHashes = new TIntHashSet(200);
    /** LSH document hashes of results retained so far. */
    private final TLongList seenLSHashes = new TLongArrayList(200);
    /** Count of retained results per domain key. */
    private final TObjectIntHashMap<String> keyCount = new TObjectIntHashMap<>(200, 0.75f, 0);

    /** Maximum number of results to retain per domain key. */
    private final int resultsPerKey;

    public UrlDeduplicator(int resultsPerKey) {
        this.resultsPerKey = resultsPerKey;
    }

    /** Returns true if the result duplicates an earlier result, or if its
     * domain has already contributed its quota of results. */
    public boolean shouldRemove(DecoratedSearchResultItem details) {
        if (!deduplicateOnSuperficialHash(details))
            return true;
        if (!deduplicateOnLSH(details))
            return true;
        if (!limitResultsPerDomain(details))
            return true;

        return false;
    }

    /** Convenience negation of {@link #shouldRemove}, usable as a filter predicate. */
    public boolean shouldRetain(DecoratedSearchResultItem details) {
        return !shouldRemove(details);
    }

    /** Returns true if no earlier result had the same (url path, title) combination. */
    private boolean deduplicateOnSuperficialHash(DecoratedSearchResultItem details) {
        return seenSuperficialHashes.add(Objects.hash(details.url.path, details.title));
    }

    /**
     * Returns true if the document's LSH data hash is sufficiently distant from
     * every previously retained hash.  A zero hash means no data is available,
     * and such results are always retained.
     */
    private boolean deduplicateOnLSH(DecoratedSearchResultItem details) {
        long thisHash = details.dataHash;

        if (0 == thisHash)
            return true;

        // Trove's forEach returns true iff the procedure returned true for every
        // element (it aborts early on the first false), so this condition holds
        // exactly when all previously seen hashes are at least
        // LSH_SIMILARITY_THRESHOLD bits away from this one
        if (seenLSHashes.forEach(otherHash -> EasyLSH.hammingDistance(thisHash, otherHash) >= LSH_SIMILARITY_THRESHOLD))
        {
            seenLSHashes.add(thisHash);
            return true;
        }

        return false;
    }

    /** Returns true while the domain key's retained result count is within the quota. */
    private boolean limitResultsPerDomain(DecoratedSearchResultItem details) {
        final var domain = details.getUrl().getDomain();
        final String key = domain.getDomainKey();

        return keyCount.adjustOrPutValue(key, 1, 1) <= resultsPerKey;
    }
}
|
@@ -0,0 +1,69 @@
|
||||
package nu.marginalia.search.svc;

import com.google.inject.Inject;
import com.zaxxer.hikari.HikariDataSource;
import nu.marginalia.WebsiteUrl;
import nu.marginalia.db.DbDomainQueries;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;
import spark.Spark;

import java.sql.SQLException;

/** Handles user requests to suggest a domain for crawling. */
public class SearchAddToCrawlQueueService {

    private final DbDomainQueries domainQueries;
    private final WebsiteUrl websiteUrl;
    private final HikariDataSource dataSource;
    private final Logger logger = LoggerFactory.getLogger(SearchAddToCrawlQueueService.class);

    @Inject
    public SearchAddToCrawlQueueService(DbDomainQueries domainQueries,
                                        WebsiteUrl websiteUrl,
                                        HikariDataSource dataSource) {
        this.domainQueries = domainQueries;
        this.websiteUrl = websiteUrl;
        this.dataSource = dataSource;
    }

    /**
     * Adds the requested domain to the crawl queue, provided the anti-misclick
     * checkbox was ticked, then redirects back to the domain's site page.
     */
    public Object suggestCrawling(Request request, Response response) throws SQLException {
        logger.info("{}", request.queryParams());

        int domainId = Integer.parseInt(request.queryParams("id"));
        boolean confirmed = "on".equals(request.queryParams("nomisclick"));

        String domainName = getDomainName(domainId);

        if (!confirmed) {
            // The checkbox exists to guard against accidental submissions
            logger.info("Nomisclick not set, not adding {} to crawl queue", domainName);
        }
        else {
            logger.info("Adding {} to crawl queue", domainName);
            addToCrawlQueue(domainId);
        }

        response.redirect(websiteUrl.withPath("/site/" + domainName));

        return "";
    }

    /** Inserts the domain into CRAWL_QUEUE, flagged as a user suggestion. */
    private void addToCrawlQueue(int id) throws SQLException {
        try (var conn = dataSource.getConnection();
             var stmt = conn.prepareStatement("""
                     INSERT IGNORE INTO CRAWL_QUEUE(DOMAIN_NAME, SOURCE)
                     SELECT DOMAIN_NAME, "user" FROM EC_DOMAIN WHERE ID=?
                     """)) {
            stmt.setInt(1, id);
            stmt.executeUpdate();
        }
    }

    /** Resolves a domain id to its name, halting with a 404 if it is unknown. */
    private String getDomainName(int id) {
        var domain = domainQueries.getDomain(id);
        if (domain.isEmpty())
            Spark.halt(404);
        return domain.get().toString();
    }
}
|
||||
|
@@ -0,0 +1,87 @@
|
||||
package nu.marginalia.search.svc;

import com.google.inject.Inject;
import nu.marginalia.api.domains.DomainInfoClient;
import nu.marginalia.api.domains.model.SimilarDomain;
import nu.marginalia.browse.DbBrowseDomainsRandom;
import nu.marginalia.browse.model.BrowseResult;
import nu.marginalia.browse.model.BrowseResultSet;
import nu.marginalia.db.DbDomainQueries;
import nu.marginalia.db.DomainBlacklist;
import nu.marginalia.model.EdgeDomain;
import nu.marginalia.search.results.BrowseResultCleaner;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static java.util.Collections.shuffle;

/** Provides result sets for the domain browse/discovery feature. */
public class SearchBrowseService {
    private final DbBrowseDomainsRandom randomDomains;
    private final DbDomainQueries domainQueries;
    private final DomainBlacklist blacklist;
    private final DomainInfoClient domainInfoClient;
    private final BrowseResultCleaner browseResultCleaner;

    @Inject
    public SearchBrowseService(DbBrowseDomainsRandom randomDomains,
                               DbDomainQueries domainQueries,
                               DomainBlacklist blacklist,
                               DomainInfoClient domainInfoClient,
                               BrowseResultCleaner browseResultCleaner)
    {
        this.randomDomains = randomDomains;
        this.domainQueries = domainQueries;
        this.blacklist = blacklist;
        this.domainInfoClient = domainInfoClient;
        this.browseResultCleaner = browseResultCleaner;
    }

    /** Returns up to 25 random non-blacklisted domains from the given set,
     * filtered to those with screenshots and without duplicates. */
    public BrowseResultSet getRandomEntries(int set) {
        List<BrowseResult> results = randomDomains.getRandomDomains(25, blacklist, set);

        results.removeIf(browseResultCleaner.shouldRemoveResultPredicateBr());

        return new BrowseResultSet(results);
    }

    /**
     * Returns domains related to the given domain, based on the similarity
     * service, supplemented with link-adjacent domains when too few similar
     * domains with screenshots are found.
     *
     * @throws TimeoutException if the domain info service does not respond in time
     */
    public BrowseResultSet getRelatedEntries(String domainName) throws ExecutionException, InterruptedException, TimeoutException {
        var domain = domainQueries.getDomainId(new EdgeDomain(domainName));

        var neighbors = domainInfoClient.similarDomains(domain, 50)
                .get(100, TimeUnit.MILLISECONDS);

        // Only show domains we have a screenshot for
        neighbors.removeIf(sd -> !sd.screenshot());

        // If the results are very few, supplement with the alternative shitty algorithm
        if (neighbors.size() < 25) {
            Set<SimilarDomain> allNeighbors = new HashSet<>(neighbors);
            allNeighbors.addAll(domainInfoClient
                    .linkedDomains(domain, 50)
                    .get(100, TimeUnit.MILLISECONDS)
            );

            neighbors.clear();
            neighbors.addAll(allNeighbors);
            neighbors.removeIf(sd -> !sd.screenshot());
        }

        List<BrowseResult> results = new ArrayList<>(neighbors.size());
        for (SimilarDomain sd : neighbors) {
            var resultDomain = domainQueries.getDomain(sd.domainId());
            if (resultDomain.isEmpty())
                continue;

            results.add(new BrowseResult(resultDomain.get().toRootUrlHttp(), sd.domainId(), 0, sd.screenshot()));
        }

        // Shuffle the items for a less repetitive experience.
        // (Bug fix: previously shuffle(neighbors) was called after `results`
        // had already been built, which had no effect on the returned set.)
        shuffle(results);

        return new BrowseResultSet(results, domainName);
    }
}
|
@@ -0,0 +1,69 @@
|
||||
package nu.marginalia.search.svc;

import com.google.inject.Inject;
import nu.marginalia.renderer.MustacheRenderer;
import nu.marginalia.renderer.RendererFactory;
import nu.marginalia.search.SearchOperator;
import nu.marginalia.search.model.UrlDetails;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;

/** Renders a page showing the links between two domains, in both directions. */
public class SearchCrosstalkService {
    private static final Logger logger = LoggerFactory.getLogger(SearchCrosstalkService.class);
    private final SearchOperator searchOperator;
    private final MustacheRenderer<CrosstalkResult> renderer;

    @Inject
    public SearchCrosstalkService(SearchOperator searchOperator,
                                  RendererFactory rendererFactory) throws IOException
    {
        this.searchOperator = searchOperator;
        this.renderer = rendererFactory.renderer("search/site-info/site-crosstalk");
    }

    /**
     * Handles a request whose 'domains' query parameter holds two comma-separated
     * domain names, and renders the link search results between them.
     *
     * @throws IllegalArgumentException if the parameter does not name exactly two domains
     */
    public Object handle(Request request, Response response) throws SQLException {
        String domains = request.queryParams("domains");
        String[] parts = StringUtils.split(domains, ',');

        if (parts.length != 2) {
            throw new IllegalArgumentException("Expected exactly two domains");
        }

        response.type("text/html");

        for (int i = 0; i < parts.length; i++) {
            parts[i] = parts[i].trim();
        }

        String domainA = parts[0];
        String domainB = parts[1];

        var model = new CrosstalkResult(
                domainA,
                domainB,
                searchOperator.doLinkSearch(domainA, domainB),
                searchOperator.doLinkSearch(domainB, domainA));

        return renderer.render(model);
    }


    /** View model holding the link results between the two domains, both ways. */
    private record CrosstalkResult(String domainA,
                                   String domainB,
                                   List<UrlDetails> forward,
                                   List<UrlDetails> backward)
    {
        public boolean isFocusDomain() {
            // Hack to get the search result templates to behave well
            return true;
        }

        public boolean hasBoth() {
            return !forward.isEmpty() && !backward.isEmpty();
        }
    }
}
|
@@ -0,0 +1,47 @@
|
||||
package nu.marginalia.search.svc;

import com.google.inject.Inject;
import nu.marginalia.index.api.IndexMqClient;
import nu.marginalia.renderer.MustacheRenderer;
import nu.marginalia.renderer.RendererFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;

import java.io.IOException;
import java.util.Map;

/** Renders the error page shown when the search index cannot be reached. */
public class SearchErrorPageService {
    private final IndexMqClient indexMqClient;
    private final Logger logger = LoggerFactory.getLogger(getClass());
    private final MustacheRenderer<Object> renderer;

    @Inject
    public SearchErrorPageService(IndexMqClient indexMqClient,
                                  RendererFactory rendererFactory) throws IOException {

        renderer = rendererFactory.renderer("search/error-page-search");

        this.indexMqClient = indexMqClient;
    }

    /** Writes a rendered "internal error" page to the response body. */
    public void serveError(Request request, Response rsp) {
        // Fix: removed the stray article in "takes a about two or three minutes"
        rsp.body(renderError(request, "Internal error",
                """
                An error occurred when communicating with the search engine index.
                <p>
                This is hopefully a temporary state of affairs. It may be due to
                an upgrade. The index typically takes about two or three minutes
                to reload from a cold restart. Thanks for your patience.
                """));
    }

    /** Renders the error template, passing through the original query parameters
     * so the search form retains its state. */
    private String renderError(Request request, String title, String message) {
        return renderer.render(Map.of("title", title, "message", message,
                "profile", request.queryParamOrDefault("profile", ""),
                "js", request.queryParamOrDefault("js", ""),
                "query", request.queryParamOrDefault("query", "")
        ));
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user