Mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git
Synced 2025-10-05 21:22:39 +02:00
Compare commits: deploy-001...deploy-006 (159 commits)
579a115243
2c67f50a43
78a958e2b0
4e939389b2
e67a9bdb91
567e4e1237
4342e42722
bc818056e6
de2feac238
1e770205a5
e44ecd6d69
5b93a0e633
08fb0e5efe
bcf67782ea
ef3f175ede
bbe4b5d9fd
c67a635103
20b24133fb
f2567677e8
bc2c2061f2
1c7f5a31a5
59a8ea60f7
aa9b1244ea
2d17233366
b245cc9f38
6614d05bdf
55aeb03c4a
faa589962f
c7edd6b39f
79da622e3b
3da8337ba6
a32d230f0a
3772bfd387
02a7900d1a
a1fb92468f
b7f0a2a98e
5fb76b2e79
ad8c97f342
dc1b6373eb
983d6d067c
a84a06975c
d2864c13ec
03ba53ce51
d4a6684931
6f0485287a
59e2dd4c26
ca1807caae
26c20e18ac
7c90b6b414
b63c54c4ce
fecd2f4ec3
39e420de88
dc83619861
87d1c89701
a42a7769e2
202bda884f
2315fdc731
b5469bd8a1
6a6318d04c
55933f8d40
be6382e0d0
45e771f96b
8dde502cc9
3e66767af3
9ec9d1b338
dcad0d7863
94e1aa0baf
b62f043910
6ea22d0d21
8c69dc31b8
00734ea87f
3009713db4
9b2ceaf37c
8019c2ce18
a9e312b8b1
4da3563d8a
48d0a3089a
594df64b20
06efb5abfc
78eb1417a7
8c8f2ad5ee
f71e79d10f
1b27c5cf06
67edc8f90d
5f576b7d0c
8b05c788fd
236f033bc9
510fc75121
0376f2e6e3
0b65164f60
9be477de33
84f55b84ff
ab5c30ad51
0c839453c5
5e4c5d03ae
710af4999a
a5b0a1ae62
e9f71ee39b
baeb4a46cd
5e2a8e9f27
cc1a5bdf90
7f7b1ffaba
0ea8092350
483d29497e
bae44497fe
0d59202aca
81cdd6385d
e76c42329f
e6ef4734ea
df4bc1d7e9
2b222efa75
6d18e6d840
2a3c63f209
9f70cecaef
c08203e2ed
86497fd32f
3b998573fd
e161882ec7
357f349e30
e4769f541d
2a173e2861
a6a900266c
bdba53f055
bbdde789e7
eab61cd48a
0ce2ba9ad9
3ddcebaa36
b91463383e
7444a2f36c
fdee07048d
2fbf201761
4018e4c434
f3382b5bd8
9287ee0141
2769c8f869
ddb66f33ba
79500b8fbc
187eea43a4
a89ed6fa9f
8d168be138
6e1aa7b391
deab9b9516
39d99a906a
6f72e6e0d3
d786d79483
01510f6c2e
7ba43e9e3f
97bfcd1353
aa3c85c196
fb75a3827d
7d546d0e2a
8fcb6ffd7a
f97de0c15a
be9e192b78
75ae1c9526
33761a0236
19b69b1764
8b804359a9
f050bf5c4c
.github/FUNDING.yml (vendored, 1 line changed):

@@ -1,5 +1,6 @@
# These are supported funding model platforms

+polar: marginalia-search
github: MarginaliaSearch
patreon: marginalia_nu
open_collective: # Replace with a single Open Collective username
.gitignore (vendored, 1 line changed):

@@ -7,3 +7,4 @@ build/
lombok.config
Dockerfile
run
+jte-classes
ROADMAP.md (75 lines changed):

@@ -1,4 +1,4 @@
-# Roadmap 2024-2025
+# Roadmap 2025

This is a roadmap with major features planned for Marginalia Search.

@@ -8,20 +8,10 @@ be implemented as well.
Major goals:

* Reach 1 billion pages indexed
-* Improve technical ability of indexing and search. Although this area has improved a bit, the
-search engine is still not very good at dealing with longer queries.
-
-## Proper Position Index (COMPLETED 2024-09)
-
-The search engine uses a fixed width bit mask to indicate word positions. It has the benefit
-of being very fast to evaluate and works well for what it is, but is inaccurate and has the
-drawback of making support for quoted search terms inaccurate and largely reliant on indexing
-word n-grams known beforehand. This limits the ability to interpret longer queries.
-
-The positions mask should be supplemented or replaced with a more accurate (e.g.) gamma coded positions
-list, as is the civilized way of doing this.
-
-Completed with PR [#99](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/99)
+* Improve technical ability of indexing and search. ~~Although this area has improved a bit, the
+search engine is still not very good at dealing with longer queries.~~ (As of PR [#129](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/129), this has improved significantly. There is still more work to be done )

## Hybridize crawler w/ Common Crawl data

@@ -37,16 +27,9 @@ Retaining the ability to independently crawl the web is still strongly desirable

## Safe Search

-The search engine has a bit of a problem showing spicy content mixed in with the results. It would be desirable
-to have a way to filter this out. It's likely something like a URL blacklist (e.g. [UT1](https://dsi.ut-capitole.fr/blacklists/index_en.php) )
+The search engine has a bit of a problem showing spicy content mixed in with the results. It would be desirable to have a way to filter this out. It's likely something like a URL blacklist (e.g. [UT1](https://dsi.ut-capitole.fr/blacklists/index_en.php) )
combined with naive bayesian filter would go a long way, or something more sophisticated...?

-## Web Design Overhaul
-
-The design is kinda clunky and hard to maintain, and needlessly outdated-looking.
-
-In progress: PR [#127](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/127) -- demo available at https://test.marginalia.nu/
-
## Additional Language Support

It would be desirable if the search engine supported more languages than English. This is partially about

@@ -55,15 +38,6 @@ associated with each language added, at least a models file or two, as well as s

It would be very helpful to find a speaker of a large language other than English to help in the fine tuning.

-## Finalize RSS support (COMPLETED 2024-11)
-
-Marginalia has experimental RSS preview support for a few domains. This works well and
-it should be extended to all domains. It would also be interesting to offer search of the
-RSS data itself, or use the RSS set to feed a special live index that updates faster than the
-main dataset.
-
-Completed with PR [#122](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/122) and PR [#125](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/125)
-
## Support for binary formats like PDF

The crawler needs to be modified to retain them, and the conversion logic needs to parse them.

@@ -80,5 +54,42 @@ This looks like a good idea that wouldn't just help clean up the search filters
website, but might be cheap enough we might go as far as to offer a number of ad-hoc custom search
filter for any API consumer.

-I've talked to the stract dev and he does not think it's a good idea to mimic their optics language,
-which is quite ad-hoc, but instead to work together to find some new common description language for this.
+I've talked to the stract dev and he does not think it's a good idea to mimic their optics language, which is quite ad-hoc, but instead to work together to find some new common description language for this.
+
+## Show favicons next to search results
+
+This is expected from search engines. Basic proof of concept sketch of fetching this data has been done, but the feature is some way from being reality.
+
+## Specialized crawler for github
+
+One of the search engine's biggest limitations right now is that it does not index github at all. A specialized crawler that fetches at least the readme.md would go a long way toward providing search capabilities in this domain.
+
+# Completed
+
+## Web Design Overhaul (COMPLETED 2025-01)
+
+The design is kinda clunky and hard to maintain, and needlessly outdated-looking.
+
+PR [#127](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/127)
+
+## Finalize RSS support (COMPLETED 2024-11)
+
+Marginalia has experimental RSS preview support for a few domains. This works well and
+it should be extended to all domains. It would also be interesting to offer search of the
+RSS data itself, or use the RSS set to feed a special live index that updates faster than the
+main dataset.
+
+Completed with PR [#122](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/122) and PR [#125](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/125)
+
+## Proper Position Index (COMPLETED 2024-09)
+
+The search engine uses a fixed width bit mask to indicate word positions. It has the benefit
+of being very fast to evaluate and works well for what it is, but is inaccurate and has the
+drawback of making support for quoted search terms inaccurate and largely reliant on indexing
+word n-grams known beforehand. This limits the ability to interpret longer queries.
+
+The positions mask should be supplemented or replaced with a more accurate (e.g.) gamma coded positions
+list, as is the civilized way of doing this.
+
+Completed with PR [#99](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/99)
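The roadmap's position-index item mentions a "gamma coded positions list". As a point of reference only — this is a minimal illustrative sketch, not the implementation from PR #99 — Elias gamma coding stores each delta between successive positions as a unary length prefix followed by the value's binary digits:

```java
// Illustrative sketch (not Marginalia's actual code): Elias gamma coding of a
// strictly increasing, 1-based positions list, stored as gamma-coded deltas.
import java.util.BitSet;

class GammaPositionsSketch {
    // Gamma code for n >= 1: (bitlen(n) - 1) zero bits, then n in binary, MSB first.
    static int writeGamma(BitSet out, int at, int n) {
        int bits = 32 - Integer.numberOfLeadingZeros(n);
        at += bits - 1;                       // unary length prefix (zeros; BitSet defaults to 0)
        for (int i = bits - 1; i >= 0; i--) { // binary digits of n
            if ((n & (1 << i)) != 0) out.set(at);
            at++;
        }
        return at;
    }

    static BitSet encode(int[] positions) {   // e.g. {3, 5, 12} -> deltas 3, 2, 7
        BitSet out = new BitSet();
        int at = 0, prev = 0;
        for (int pos : positions) {
            at = writeGamma(out, at, pos - prev);
            prev = pos;
        }
        return out;
    }
}
```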
@@ -47,7 +47,8 @@ ext {
    dockerImageBase='container-registry.oracle.com/graalvm/jdk:23'
    dockerImageTag='latest'
    dockerImageRegistry='marginalia'
-   jibVersion = '3.4.3'
+   jibVersion = '3.4.4'
+
}

idea {
DbDomainQueries.java:

@@ -8,18 +8,22 @@ import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.zaxxer.hikari.HikariDataSource;
import nu.marginalia.model.EdgeDomain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

import java.sql.SQLException;
-import java.util.NoSuchElementException;
-import java.util.Optional;
-import java.util.OptionalInt;
+import java.util.*;
+import java.util.concurrent.ExecutionException;

@Singleton
public class DbDomainQueries {
    private final HikariDataSource dataSource;
+
+   private static final Logger logger = LoggerFactory.getLogger(DbDomainQueries.class);
+
    private final Cache<EdgeDomain, Integer> domainIdCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+   private final Cache<Integer, EdgeDomain> domainNameCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+   private final Cache<String, List<DomainWithNode>> siblingsCache = CacheBuilder.newBuilder().maximumSize(10_000).build();

    @Inject
    public DbDomainQueries(HikariDataSource dataSource)
@@ -28,26 +32,31 @@ public class DbDomainQueries {
    }

-   public Integer getDomainId(EdgeDomain domain) {
-       try (var connection = dataSource.getConnection()) {
+   public Integer getDomainId(EdgeDomain domain) throws NoSuchElementException {
+       try {
            return domainIdCache.get(domain, () -> {
-               try (var stmt = connection.prepareStatement("SELECT ID FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+               try (var connection = dataSource.getConnection();
+                    var stmt = connection.prepareStatement("SELECT ID FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+
                    stmt.setString(1, domain.toString());
                    var rsp = stmt.executeQuery();
                    if (rsp.next()) {
                        return rsp.getInt(1);
                    }
                }
+               catch (SQLException ex) {
+                   throw new RuntimeException(ex);
+               }

                throw new NoSuchElementException();
            });
        }
+       catch (UncheckedExecutionException ex) {
+           throw new NoSuchElementException();
+       }
+       catch (ExecutionException ex) {
+           throw new RuntimeException(ex.getCause());
+       }
-       catch (SQLException ex) {
-           throw new RuntimeException(ex);
-       }
    }

    public OptionalInt tryGetDomainId(EdgeDomain domain) {
@@ -80,22 +89,60 @@ public class DbDomainQueries {
    }

    public Optional<EdgeDomain> getDomain(int id) {
-       try (var connection = dataSource.getConnection()) {
+
+       EdgeDomain existing = domainNameCache.getIfPresent(id);
+       if (existing != null) {
+           return Optional.of(existing);
+       }

+       try (var connection = dataSource.getConnection()) {
            try (var stmt = connection.prepareStatement("SELECT DOMAIN_NAME FROM EC_DOMAIN WHERE ID=?")) {
                stmt.setInt(1, id);
                var rsp = stmt.executeQuery();
                if (rsp.next()) {
-                   return Optional.of(new EdgeDomain(rsp.getString(1)));
+                   var val = new EdgeDomain(rsp.getString(1));
+                   domainNameCache.put(id, val);
+                   return Optional.of(val);
                }
                return Optional.empty();
            }
        }
+       catch (UncheckedExecutionException ex) {
+           throw new RuntimeException(ex.getCause());
+       }
        catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }

+   public List<DomainWithNode> otherSubdomains(EdgeDomain domain, int cnt) throws ExecutionException {
+       String topDomain = domain.topDomain;
+
+       return siblingsCache.get(topDomain, () -> {
+           List<DomainWithNode> ret = new ArrayList<>();
+
+           try (var conn = dataSource.getConnection();
+                var stmt = conn.prepareStatement("SELECT DOMAIN_NAME, NODE_AFFINITY FROM EC_DOMAIN WHERE DOMAIN_TOP = ? LIMIT ?")) {
+               stmt.setString(1, topDomain);
+               stmt.setInt(2, cnt);
+
+               var rs = stmt.executeQuery();
+               while (rs.next()) {
+                   var sibling = new EdgeDomain(rs.getString(1));
+
+                   if (sibling.equals(domain))
+                       continue;
+
+                   ret.add(new DomainWithNode(sibling, rs.getInt(2)));
+               }
+           } catch (SQLException e) {
+               logger.error("Failed to get domain neighbors");
+           }
+           return ret;
+       });
+   }
+
+   public record DomainWithNode (EdgeDomain domain, int nodeAffinity) {
+       public boolean isIndexed() {
+           return nodeAffinity > 0;
+       }
+   }
}
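A note on the exception handling in the reworked getDomainId above: Guava's Cache.get(key, loader) wraps exceptions thrown by the loader, so the caller must unwrap them. A minimal self-contained illustration (assuming Guava on the classpath, as the Cache/CacheBuilder usage above already implies):

```java
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.util.concurrent.UncheckedExecutionException;

import java.util.NoSuchElementException;

class CacheExceptionDemo {
    public static void main(String[] args) {
        Cache<String, Integer> cache = CacheBuilder.newBuilder().build();
        try {
            // Unchecked exceptions thrown by the loader come back wrapped:
            cache.get("missing", () -> { throw new NoSuchElementException(); });
        } catch (UncheckedExecutionException e) {
            // e.getCause() is the NoSuchElementException; the diff above rethrows
            // a fresh NoSuchElementException at this point.
            System.out.println(e.getCause());
        } catch (java.util.concurrent.ExecutionException e) {
            // Checked exceptions from the loader would land here instead.
            System.out.println(e.getCause());
        }
    }
}
```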
DbDomainStatsExportMultitool.java (file removed):

@@ -1,118 +0,0 @@
package nu.marginalia.db;

import com.zaxxer.hikari.HikariDataSource;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.OptionalInt;

/** Class used in exporting data. This is intended to be used for a brief time
 * and then discarded, not kept around as a service.
 */
public class DbDomainStatsExportMultitool implements AutoCloseable {
    private final Connection connection;
    private final int nodeId;
    private final PreparedStatement knownUrlsQuery;
    private final PreparedStatement visitedUrlsQuery;
    private final PreparedStatement goodUrlsQuery;
    private final PreparedStatement domainNameToId;

    private final PreparedStatement allDomainsQuery;
    private final PreparedStatement crawlQueueDomains;
    private final PreparedStatement indexedDomainsQuery;

    public DbDomainStatsExportMultitool(HikariDataSource dataSource, int nodeId) throws SQLException {
        this.connection = dataSource.getConnection();
        this.nodeId = nodeId;

        knownUrlsQuery = connection.prepareStatement("""
                SELECT KNOWN_URLS
                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
                WHERE DOMAIN_NAME=?
                """);
        visitedUrlsQuery = connection.prepareStatement("""
                SELECT VISITED_URLS
                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
                WHERE DOMAIN_NAME=?
                """);
        goodUrlsQuery = connection.prepareStatement("""
                SELECT GOOD_URLS
                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
                WHERE DOMAIN_NAME=?
                """);
        domainNameToId = connection.prepareStatement("""
                SELECT ID
                FROM EC_DOMAIN
                WHERE DOMAIN_NAME=?
                """);
        allDomainsQuery = connection.prepareStatement("""
                SELECT DOMAIN_NAME
                FROM EC_DOMAIN
                """);
        crawlQueueDomains = connection.prepareStatement("""
                SELECT DOMAIN_NAME
                FROM CRAWL_QUEUE
                """);
        indexedDomainsQuery = connection.prepareStatement("""
                SELECT DOMAIN_NAME
                FROM EC_DOMAIN
                WHERE INDEXED > 0
                """);
    }

    public OptionalInt getVisitedUrls(String domainName) throws SQLException {
        return executeNameToIntQuery(domainName, visitedUrlsQuery);
    }

    public OptionalInt getDomainId(String domainName) throws SQLException {
        return executeNameToIntQuery(domainName, domainNameToId);
    }

    public List<String> getCrawlQueueDomains() throws SQLException {
        return executeListQuery(crawlQueueDomains, 100);
    }

    public List<String> getAllIndexedDomains() throws SQLException {
        return executeListQuery(indexedDomainsQuery, 100_000);
    }

    private OptionalInt executeNameToIntQuery(String domainName, PreparedStatement statement)
            throws SQLException {
        statement.setString(1, domainName);
        var rs = statement.executeQuery();

        if (rs.next()) {
            return OptionalInt.of(rs.getInt(1));
        }

        return OptionalInt.empty();
    }

    private List<String> executeListQuery(PreparedStatement statement, int sizeHint) throws SQLException {
        List<String> ret = new ArrayList<>(sizeHint);

        var rs = statement.executeQuery();

        while (rs.next()) {
            ret.add(rs.getString(1));
        }

        return ret;
    }

    @Override
    public void close() throws SQLException {
        knownUrlsQuery.close();
        goodUrlsQuery.close();
        visitedUrlsQuery.close();
        allDomainsQuery.close();
        crawlQueueDomains.close();
        domainNameToId.close();
        connection.close();
    }
}
QueryParams.java:

@@ -83,6 +83,11 @@ public class QueryParams {
        if (path.endsWith("StoryView.py")) { // folklore.org is neat
            return param.startsWith("project=") || param.startsWith("story=");
        }
+
+       // www.perseus.tufts.edu:
+       if (param.startsWith("collection=")) return true;
+       if (param.startsWith("doc=")) return true;
+
        return false;
    }
}
@@ -42,6 +42,12 @@ dependencies {
    implementation libs.bundles.curator
    implementation libs.bundles.flyway

+   libs.bundles.jooby.get().each {
+       implementation dependencies.create(it) {
+           exclude group: 'org.slf4j'
+       }
+   }
+
    testImplementation libs.bundles.slf4j.test
    implementation libs.bundles.mariadb
GrpcMultiNodeChannelPool.java:

@@ -7,8 +7,6 @@ import nu.marginalia.service.discovery.property.PartitionTraits;
import nu.marginalia.service.discovery.property.ServiceEndpoint;
import nu.marginalia.service.discovery.property.ServiceKey;
import nu.marginalia.service.discovery.property.ServicePartition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.CompletableFuture;

@@ -24,7 +22,7 @@ import java.util.function.Function;
public class GrpcMultiNodeChannelPool<STUB> {
    private final ConcurrentHashMap<Integer, GrpcSingleNodeChannelPool<STUB>> pools =
            new ConcurrentHashMap<>();
-   private static final Logger logger = LoggerFactory.getLogger(GrpcMultiNodeChannelPool.class);

    private final ServiceRegistryIf serviceRegistryIf;
    private final ServiceKey<? extends PartitionTraits.Multicast> serviceKey;
    private final Function<ServiceEndpoint.InstanceAddress, ManagedChannel> channelConstructor;
GrpcSingleNodeChannelPool.java:

@@ -10,6 +10,8 @@ import nu.marginalia.service.discovery.property.ServiceKey;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;

import java.time.Duration;
import java.util.*;

@@ -26,13 +28,13 @@ import java.util.function.Function;
public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
    private final Map<InstanceAddress, ConnectionHolder> channels = new ConcurrentHashMap<>();

+   private final Marker grpcMarker = MarkerFactory.getMarker("GRPC");
    private static final Logger logger = LoggerFactory.getLogger(GrpcSingleNodeChannelPool.class);

    private final ServiceRegistryIf serviceRegistryIf;
    private final Function<InstanceAddress, ManagedChannel> channelConstructor;
    private final Function<ManagedChannel, STUB> stubConstructor;


    public GrpcSingleNodeChannelPool(ServiceRegistryIf serviceRegistryIf,
                                     ServiceKey<? extends PartitionTraits.Unicast> serviceKey,
                                     Function<InstanceAddress, ManagedChannel> channelConstructor,

@@ -48,8 +50,6 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
        serviceRegistryIf.registerMonitor(this);

        onChange();
-
-       awaitChannel(Duration.ofSeconds(5));
    }
@@ -62,10 +62,10 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
        for (var route : Sets.symmetricDifference(oldRoutes, newRoutes)) {
            ConnectionHolder oldChannel;
            if (newRoutes.contains(route)) {
-               logger.info("Adding route {}", route);
+               logger.info(grpcMarker, "Adding route {} => {}", serviceKey, route);
                oldChannel = channels.put(route, new ConnectionHolder(route));
            } else {
-               logger.info("Expelling route {}", route);
+               logger.info(grpcMarker, "Expelling route {} => {}", serviceKey, route);
                oldChannel = channels.remove(route);
            }
            if (oldChannel != null) {

@@ -103,7 +103,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
        }

        try {
-           logger.info("Creating channel for {}:{}", serviceKey, address);
+           logger.info(grpcMarker, "Creating channel for {} => {}", serviceKey, address);
            value = channelConstructor.apply(address);
            if (channel.compareAndSet(null, value)) {
                return value;

@@ -114,7 +114,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
            }
        }
        catch (Exception e) {
-           logger.error("Failed to get channel for " + address, e);
+           logger.error(grpcMarker, "Failed to get channel for " + address, e);
            return null;
        }
    }

@@ -206,7 +206,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
        }

        for (var e : exceptions) {
-           logger.error("Failed to call service {}", serviceKey, e);
+           logger.error(grpcMarker, "Failed to call service {}", serviceKey, e);
        }

        throw new ServiceNotAvailableException(serviceKey);
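Context for the grpcMarker changes above: an SLF4J Marker tags log events so the logging backend can route or filter them; the change doesn't alter what is logged, only how it can be filtered. A small self-contained sketch of the mechanism:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

class MarkerDemo {
    private static final Logger logger = LoggerFactory.getLogger(MarkerDemo.class);
    private static final Marker GRPC = MarkerFactory.getMarker("GRPC");

    void onRouteAdded(String serviceKey, String route) {
        // A logback/log4j configuration can match on the "GRPC" marker to
        // send these events to a separate appender or drop them entirely.
        logger.info(GRPC, "Adding route {} => {}", serviceKey, route);
    }
}
```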
ServiceNotAvailableException.java:

@@ -4,6 +4,11 @@ import nu.marginalia.service.discovery.property.ServiceKey;

public class ServiceNotAvailableException extends RuntimeException {
    public ServiceNotAvailableException(ServiceKey<?> key) {
-       super("Service " + key + " not available");
+       super(key.toString());
    }
+
+   @Override
+   public StackTraceElement[] getStackTrace() { // Suppress stack trace
+       return new StackTraceElement[0];
+   }
}
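The getStackTrace() override above hides the trace when the exception is printed, but the trace is still captured at construction time. A common complementary trick — shown here only as a hedged sketch, not something this diff does — is the four-argument RuntimeException constructor, which skips capturing the trace entirely:

```java
// Sketch: an exception that is cheap to throw because it never fills in
// its stack trace (writableStackTrace = false).
public class QuietException extends RuntimeException {
    public QuietException(String message) {
        super(message, null, false, false); // message, cause, suppression, writableStackTrace
    }
}
```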
ServiceEndpoint.java:

@@ -48,5 +48,10 @@ public record ServiceEndpoint(String host, int port) {
        public int port() {
            return endpoint.port();
        }
+
+       @Override
+       public String toString() {
+           return endpoint().host() + ":" + endpoint.port() + " [" + instance + "]";
+       }
    }
}
ServiceKey.java:

@@ -48,6 +48,19 @@ public sealed interface ServiceKey<P extends ServicePartition> {
        {
            throw new UnsupportedOperationException();
        }
+
+       @Override
+       public String toString() {
+           final String shortName;
+
+           int periodIndex = name.lastIndexOf('.');
+
+           if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
+           else shortName = name;
+
+           return "rest:" + shortName;
+       }
+
    }
    record Grpc<P extends ServicePartition>(String name, P partition) implements ServiceKey<P> {
        public String baseName() {

@@ -64,6 +77,18 @@ public sealed interface ServiceKey<P extends ServicePartition> {
        {
            return new Grpc<>(name, partition);
        }
+
+       @Override
+       public String toString() {
+           final String shortName;
+
+           int periodIndex = name.lastIndexOf('.');
+
+           if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
+           else shortName = name;
+
+           return "grpc:" + shortName + "[" + partition.identifier() + "]";
+       }
    }

}
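For illustration, the two toString() overrides added above shorten a fully qualified name to its last dot-separated segment; the service name used here is hypothetical:

```java
class ServiceKeyNameDemo {
    static String shortName(String name) {
        int periodIndex = name.lastIndexOf('.');
        return periodIndex >= 0 ? name.substring(periodIndex + 1) : name;
    }

    public static void main(String[] args) {
        String name = "nu.marginalia.example.SearchApi"; // hypothetical service name
        System.out.println("rest:" + shortName(name));          // rest:SearchApi
        System.out.println("grpc:" + shortName(name) + "[1]");  // grpc:SearchApi[1]
    }
}
```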
DatabaseModule.java:

@@ -89,7 +89,7 @@ public class DatabaseModule extends AbstractModule {
        config.addDataSourceProperty("prepStmtCacheSize", "250");
        config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");

-       config.setMaximumPoolSize(5);
+       config.setMaximumPoolSize(Integer.getInteger("db.poolSize", 5));
        config.setMinimumIdle(2);

        config.setMaxLifetime(Duration.ofMinutes(9).toMillis());
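Integer.getInteger reads a JVM system property (not an environment variable), so after the change above the Hikari pool size can presumably be tuned per service at launch:

```java
// java -Ddb.poolSize=10 ...   => pool size 10; property unset => default 5.
int poolSize = Integer.getInteger("db.poolSize", 5);
```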
JoobyService.java (new file):

@@ -0,0 +1,178 @@
package nu.marginalia.service.server;

import io.jooby.*;
import io.prometheus.client.Counter;
import nu.marginalia.mq.inbox.MqInboxIf;
import nu.marginalia.service.client.ServiceNotAvailableException;
import nu.marginalia.service.discovery.property.ServiceEndpoint;
import nu.marginalia.service.discovery.property.ServiceKey;
import nu.marginalia.service.discovery.property.ServicePartition;
import nu.marginalia.service.module.ServiceConfiguration;
import nu.marginalia.service.server.jte.JteModule;
import nu.marginalia.service.server.mq.ServiceMqSubscription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class JoobyService {
    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Marker for filtering out sensitive content from the persistent logs
    private final Marker httpMarker = MarkerFactory.getMarker("HTTP");

    private final Initialization initialization;

    private final static Counter request_counter = Counter.build("wmsa_request_counter", "Request Counter")
            .labelNames("service", "node")
            .register();
    private final static Counter request_counter_good = Counter.build("wmsa_request_counter_good", "Good Requests")
            .labelNames("service", "node")
            .register();
    private final static Counter request_counter_bad = Counter.build("wmsa_request_counter_bad", "Bad Requests")
            .labelNames("service", "node")
            .register();
    private final static Counter request_counter_err = Counter.build("wmsa_request_counter_err", "Error Requests")
            .labelNames("service", "node")
            .register();
    private final String serviceName;
    private static volatile boolean initialized = false;

    protected final MqInboxIf messageQueueInbox;
    private final int node;
    private GrpcServer grpcServer;

    private ServiceConfiguration config;
    private final List<MvcExtension> joobyServices;
    private final ServiceEndpoint restEndpoint;

    public JoobyService(BaseServiceParams params,
                        ServicePartition partition,
                        List<DiscoverableService> grpcServices,
                        List<MvcExtension> joobyServices
                        ) throws Exception {

        this.joobyServices = joobyServices;
        this.initialization = params.initialization;
        config = params.configuration;
        node = config.node();

        String inboxName = config.serviceName();
        logger.info("Inbox name: {}", inboxName);

        var serviceRegistry = params.serviceRegistry;

        restEndpoint = serviceRegistry.registerService(ServiceKey.forRest(config.serviceId(), config.node()),
                config.instanceUuid(), config.externalAddress());

        var mqInboxFactory = params.messageQueueInboxFactory;
        messageQueueInbox = mqInboxFactory.createSynchronousInbox(inboxName, config.node(), config.instanceUuid());
        messageQueueInbox.subscribe(new ServiceMqSubscription(this));

        serviceName = System.getProperty("service-name");

        initialization.addCallback(params.heartbeat::start);
        initialization.addCallback(messageQueueInbox::start);
        initialization.addCallback(() -> params.eventLog.logEvent("SVC-INIT", serviceName + ":" + config.node()));
        initialization.addCallback(() -> serviceRegistry.announceInstance(config.instanceUuid()));

        Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
            if (e instanceof ServiceNotAvailableException) {
                // reduce log spam for this common case
                logger.error("Service not available: {}", e.getMessage());
            }
            else {
                logger.error("Uncaught exception", e);
            }
            request_counter_err.labels(serviceName, Integer.toString(node)).inc();
        });

        if (!initialization.isReady() && ! initialized ) {
            initialized = true;
            grpcServer = new GrpcServer(config, serviceRegistry, partition, grpcServices);
            grpcServer.start();
        }
    }

    public void startJooby(Jooby jooby) {

        logger.info("{} Listening to {}:{} ({})", getClass().getSimpleName(),
                restEndpoint.host(),
                restEndpoint.port(),
                config.externalAddress());

        // FIXME: This won't work outside of docker, may need to submit a PR to jooby to allow classpaths here
        jooby.install(new JteModule(Path.of("/app/resources/jte"), Path.of("/app/classes/jte-precompiled")));
        jooby.assets("/*", Paths.get("/app/resources/static"));

        var options = new ServerOptions();
        options.setHost(config.bindAddress());
        options.setPort(restEndpoint.port());

        // Enable gzip compression of response data, but set compression to the lowest level
        // since it doesn't really save much more space to dial it up.  It's typically a
        // single digit percentage difference since HTML already compresses very well with level = 1.
        options.setCompressionLevel(1);


        jooby.setServerOptions(options);

        jooby.get("/internal/ping", ctx -> "pong");
        jooby.get("/internal/started", this::isInitialized);
        jooby.get("/internal/ready", this::isReady);

        for (var service : joobyServices) {
            jooby.mvc(service);
        }

        jooby.before(this::auditRequestIn);
        jooby.after(this::auditRequestOut);
    }

    private Object isInitialized(Context ctx) {
        if (initialization.isReady()) {
            return "ok";
        }
        else {
            ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
            return "bad";
        }
    }

    public boolean isReady() {
        return true;
    }

    private String isReady(Context ctx) {
        if (isReady()) {
            return "ok";
        }
        else {
            ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
            return "bad";
        }
    }

    private void auditRequestIn(Context ctx) {
        request_counter.labels(serviceName, Integer.toString(node)).inc();
    }

    private void auditRequestOut(Context ctx, Object result, Throwable failure) {
        if (ctx.getResponseCode().value() < 400) {
            request_counter_good.labels(serviceName, Integer.toString(node)).inc();
        }
        else {
            request_counter_bad.labels(serviceName, Integer.toString(node)).inc();
        }

        if (failure != null) {
            logger.error("Request failed " + ctx.getMethod() + " " + ctx.getRequestURL(), failure);
            request_counter_err.labels(serviceName, Integer.toString(node)).inc();
        }
    }

}
SparkService.java (renamed from Service.java):

@@ -16,7 +16,7 @@ import spark.Spark;

import java.util.List;

-public class Service {
+public class SparkService {
    private final Logger logger = LoggerFactory.getLogger(getClass());

    // Marker for filtering out sensitive content from the persistent logs

@@ -43,10 +43,10 @@ public class Service {
    private final int node;
    private GrpcServer grpcServer;

-   public Service(BaseServiceParams params,
-                  Runnable configureStaticFiles,
-                  ServicePartition partition,
-                  List<DiscoverableService> grpcServices) throws Exception {
+   public SparkService(BaseServiceParams params,
+                       Runnable configureStaticFiles,
+                       ServicePartition partition,
+                       List<DiscoverableService> grpcServices) throws Exception {

        this.initialization = params.initialization;
        var config = params.configuration;

@@ -126,18 +126,18 @@ public class Service {
        }
    }

-   public Service(BaseServiceParams params,
-                  ServicePartition partition,
-                  List<DiscoverableService> grpcServices) throws Exception {
+   public SparkService(BaseServiceParams params,
+                       ServicePartition partition,
+                       List<DiscoverableService> grpcServices) throws Exception {
        this(params,
-            Service::defaultSparkConfig,
+            SparkService::defaultSparkConfig,
             partition,
             grpcServices);
    }

-   public Service(BaseServiceParams params) throws Exception {
+   public SparkService(BaseServiceParams params) throws Exception {
        this(params,
-            Service::defaultSparkConfig,
+            SparkService::defaultSparkConfig,
             ServicePartition.any(),
             List.of());
    }
JteModule.java (new file):

@@ -0,0 +1,61 @@
package nu.marginalia.service.server.jte;

import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import gg.jte.ContentType;
import gg.jte.TemplateEngine;
import gg.jte.resolve.DirectoryCodeResolver;
import io.jooby.*;

import java.io.File;
import java.nio.file.Path;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Stream;

// Temporary workaround for a bug
// APL-2.0 https://github.com/jooby-project/jooby
public class JteModule implements Extension {
    private Path sourceDirectory;
    private Path classDirectory;
    private TemplateEngine templateEngine;

    public JteModule(@NonNull Path sourceDirectory, @NonNull Path classDirectory) {
        this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
        this.classDirectory = (Path)Objects.requireNonNull(classDirectory, "Class directory is required.");
    }

    public JteModule(@NonNull Path sourceDirectory) {
        this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
    }

    public JteModule(@NonNull TemplateEngine templateEngine) {
        this.templateEngine = (TemplateEngine)Objects.requireNonNull(templateEngine, "Template engine is required.");
    }

    public void install(@NonNull Jooby application) {
        if (this.templateEngine == null) {
            this.templateEngine = create(application.getEnvironment(), this.sourceDirectory, this.classDirectory);
        }

        ServiceRegistry services = application.getServices();
        services.put(TemplateEngine.class, this.templateEngine);
        application.encoder(MediaType.html, new JteTemplateEngine(this.templateEngine));
    }

    public static TemplateEngine create(@NonNull Environment environment, @NonNull Path sourceDirectory, @Nullable Path classDirectory) {
        boolean dev = environment.isActive("dev", new String[]{"test"});
        if (dev) {
            Objects.requireNonNull(sourceDirectory, "Source directory is required.");
            Path requiredClassDirectory = (Path)Optional.ofNullable(classDirectory).orElseGet(() -> sourceDirectory.resolve("jte-classes"));
            TemplateEngine engine = TemplateEngine.create(new DirectoryCodeResolver(sourceDirectory), requiredClassDirectory, ContentType.Html, environment.getClassLoader());
            Optional<List<String>> var10000 = Optional.ofNullable(System.getProperty("jooby.run.classpath")).map((it) -> it.split(File.pathSeparator)).map(Stream::of).map(Stream::toList);
            Objects.requireNonNull(engine);
            var10000.ifPresent(engine::setClassPath);
            return engine;
        } else {
            return classDirectory == null ? TemplateEngine.createPrecompiled(ContentType.Html) : TemplateEngine.createPrecompiled(classDirectory, ContentType.Html);
        }
    }
}
JteTemplateEngine.java (new file):

@@ -0,0 +1,48 @@
package nu.marginalia.service.server.jte;

import edu.umd.cs.findbugs.annotations.NonNull;
import gg.jte.TemplateEngine;
import io.jooby.Context;
import io.jooby.MapModelAndView;
import io.jooby.ModelAndView;
import io.jooby.buffer.DataBuffer;
import io.jooby.internal.jte.DataBufferOutput;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;

// Temporary workaround for a bug
// APL-2.0 https://github.com/jooby-project/jooby
class JteTemplateEngine implements io.jooby.TemplateEngine {
    private final TemplateEngine jte;
    private final List<String> extensions;

    public JteTemplateEngine(TemplateEngine jte) {
        this.jte = jte;
        this.extensions = List.of(".jte", ".kte");
    }


    @NonNull @Override
    public List<String> extensions() {
        return extensions;
    }

    @Override
    public DataBuffer render(Context ctx, ModelAndView modelAndView) {
        var buffer = ctx.getBufferFactory().allocateBuffer();
        var output = new DataBufferOutput(buffer, StandardCharsets.UTF_8);
        var attributes = ctx.getAttributes();
        if (modelAndView instanceof MapModelAndView mapModelAndView) {
            var mapModel = new HashMap<String, Object>();
            mapModel.putAll(attributes);
            mapModel.putAll(mapModelAndView.getModel());
            jte.render(modelAndView.getView(), mapModel, output);
        } else {
            jte.render(modelAndView.getView(), modelAndView.getModel(), output);
        }

        return buffer;
    }
}
ServiceMqSubscription.java:

@@ -3,7 +3,6 @@ package nu.marginalia.service.server.mq;
import nu.marginalia.mq.MqMessage;
import nu.marginalia.mq.inbox.MqInboxResponse;
import nu.marginalia.mq.inbox.MqSubscription;
-import nu.marginalia.service.server.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -15,10 +14,10 @@ import java.util.Map;
public class ServiceMqSubscription implements MqSubscription {
    private static final Logger logger = LoggerFactory.getLogger(ServiceMqSubscription.class);
    private final Map<String, Method> requests = new HashMap<>();
-   private final Service service;
+   private final Object service;


-   public ServiceMqSubscription(Service service) {
+   public ServiceMqSubscription(Object service) {
        this.service = service;

        /* Wire up all methods annotated with @MqRequest and @MqNotification
BrowseResultSet.java:

@@ -6,4 +6,8 @@ public record BrowseResultSet(Collection<BrowseResult> results, String focusDomain) {
    public BrowseResultSet(Collection<BrowseResult> results) {
        this(results, "");
    }
+
+   public boolean hasFocusDomain() {
+       return focusDomain != null && !focusDomain.isBlank();
+   }
}
DomainsProtobufCodec.java:

@@ -38,6 +38,7 @@ public class DomainsProtobufCodec {
                sd.getIndexed(),
                sd.getActive(),
                sd.getScreenshot(),
+               sd.getFeed(),
                SimilarDomain.LinkType.valueOf(sd.getLinkType().name())
        );
    }
DomainInformation.java:

@@ -71,6 +71,23 @@ public class DomainInformation {
        return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
    }

+   public String getAsnFlag() {
+       if (asnCountry == null || asnCountry.codePointCount(0, asnCountry.length()) != 2) {
+           return "";
+       }
+       String country = asnCountry;
+
+       if ("UK".equals(country)) {
+           country = "GB";
+       }
+
+       int offset = 0x1F1E6;
+       int asciiOffset = 0x41;
+       int firstChar = Character.codePointAt(country, 0) - asciiOffset + offset;
+       int secondChar = Character.codePointAt(country, 1) - asciiOffset + offset;
+       return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
+   }
+
    public EdgeDomain getDomain() {
        return this.domain;
    }
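The arithmetic in getAsnFlag() maps each ASCII capital letter to a Unicode regional indicator symbol (U+1F1E6 through U+1F1FF); a pair of those renders as a country flag, which is also why "UK" is first rewritten to the ISO code "GB". A worked example:

```java
class FlagDemo {
    public static void main(String[] args) {
        String country = "SE";
        // 'S' (0x53) - 0x41 + 0x1F1E6 = 0x1F1F8; 'E' (0x45) -> 0x1F1EA
        int first  = Character.codePointAt(country, 0) - 0x41 + 0x1F1E6;
        int second = Character.codePointAt(country, 1) - 0x41 + 0x1F1E6;
        System.out.println(new String(Character.toChars(first))
                         + new String(Character.toChars(second))); // renders as the Swedish flag
    }
}
```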
SimilarDomain.java:

@@ -9,6 +9,7 @@ public record SimilarDomain(EdgeUrl url,
                            boolean indexed,
                            boolean active,
                            boolean screenshot,
+                           boolean feed,
                            LinkType linkType) {

    public String getRankSymbols() {

@@ -52,12 +53,12 @@ public record SimilarDomain(EdgeUrl url,
            return NONE;
        }

-       public String toString() {
+       public String faIcon() {
            return switch (this) {
-               case FOWARD -> "→"
-               case BACKWARD -> "←";
-               case BIDIRECTIONAL -> "⇆";
-               case NONE -> "-";
+               case FOWARD -> "fa-solid fa-arrow-right";
+               case BACKWARD -> "fa-solid fa-arrow-left";
+               case BIDIRECTIONAL -> "fa-solid fa-arrow-right-arrow-left";
+               case NONE -> "";
            };
        }
Protobuf definition (message RpcSimilarDomain):

@@ -101,6 +101,7 @@ message RpcSimilarDomain {
    bool active = 6;
    bool screenshot = 7;
    LINK_TYPE linkType = 8;
+   bool feed = 9;

    enum LINK_TYPE {
        BACKWARD = 0;
SimilarDomainsService.java:

@@ -9,6 +9,7 @@ import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import it.unimi.dsi.fastutil.ints.Int2DoubleArrayMap;
+import nu.marginalia.WmsaHome;
import nu.marginalia.api.domains.RpcSimilarDomain;
import nu.marginalia.api.domains.model.SimilarDomain;
import nu.marginalia.api.linkgraph.AggregateLinkGraphClient;

@@ -17,10 +18,14 @@ import org.roaringbitmap.RoaringBitmap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

+import java.nio.file.Path;
+import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

@@ -32,12 +37,13 @@ public class SimilarDomainsService {
    private final HikariDataSource dataSource;
    private final AggregateLinkGraphClient linkGraphClient;

-   private volatile TIntIntHashMap domainIdToIdx = new TIntIntHashMap(100_000);
+   private final TIntIntHashMap domainIdToIdx = new TIntIntHashMap(100_000);
    private volatile int[] domainIdxToId;

    public volatile Int2DoubleArrayMap[] relatedDomains;
    public volatile TIntList[] domainNeighbors = null;
    public volatile RoaringBitmap screenshotDomains = null;
+   public volatile RoaringBitmap feedDomains = null;
    public volatile RoaringBitmap activeDomains = null;
    public volatile RoaringBitmap indexedDomains = null;
    public volatile TIntDoubleHashMap domainRanks = null;

@@ -82,6 +88,7 @@ public class SimilarDomainsService {
                domainNames = new String[domainIdToIdx.size()];
                domainNeighbors = new TIntList[domainIdToIdx.size()];
                screenshotDomains = new RoaringBitmap();
+               feedDomains = new RoaringBitmap();
                activeDomains = new RoaringBitmap();
                indexedDomains = new RoaringBitmap();
                relatedDomains = new Int2DoubleArrayMap[domainIdToIdx.size()];

@@ -145,10 +152,12 @@ public class SimilarDomainsService {
                    activeDomains.add(idx);
            }

-           updateScreenshotInfo();
-
            logger.info("Loaded {} domains", domainRanks.size());
            isReady = true;
+
+           // We can defer these as they only populate a roaringbitmap, and will degrade gracefully when not complete
+           updateScreenshotInfo();
+           updateFeedInfo();
        }
    }
    catch (SQLException throwables) {

@@ -156,6 +165,42 @@ public class SimilarDomainsService {
        }
    }

+   private void updateFeedInfo() {
+       Set<String> feedsDomainNames = new HashSet<>(500_000);
+       Path readerDbPath = WmsaHome.getDataPath().resolve("rss-feeds.db").toAbsolutePath();
+       String dbUrl = "jdbc:sqlite:" + readerDbPath;
+
+       logger.info("Opening feed db at " + dbUrl);
+
+       try (var conn = DriverManager.getConnection(dbUrl);
+            var stmt = conn.createStatement()) {
+           var rs = stmt.executeQuery("""
+                   select
+                       json_extract(feed, '$.domain') as domain
+                   from feed
+                   where json_array_length(feed, '$.items') > 0
+                   """);
+           while (rs.next()) {
+               feedsDomainNames.add(rs.getString(1));
+           }
+       }
+       catch (SQLException ex) {
+           logger.error("Failed to read RSS feed items", ex);
+       }
+
+       for (int idx = 0; idx < domainNames.length; idx++) {
+           String name = domainNames[idx];
+           if (name == null) {
+               continue;
+           }
+
+           if (feedsDomainNames.contains(name)) {
+               feedDomains.add(idx);
+           }
+       }
+   }
+
    private void updateScreenshotInfo() {
        try (var connection = dataSource.getConnection()) {
            try (var stmt = connection.createStatement()) {

@@ -254,6 +299,7 @@ public class SimilarDomainsService {
                        .setIndexed(indexedDomains.contains(idx))
                        .setActive(activeDomains.contains(idx))
                        .setScreenshot(screenshotDomains.contains(idx))
+                       .setFeed(feedDomains.contains(idx))
                        .setLinkType(RpcSimilarDomain.LINK_TYPE.valueOf(linkType.name()))
                        .build());

@@ -369,6 +415,7 @@ public class SimilarDomainsService {
                        .setIndexed(indexedDomains.contains(idx))
                        .setActive(activeDomains.contains(idx))
                        .setScreenshot(screenshotDomains.contains(idx))
+                       .setFeed(feedDomains.contains(idx))
                        .setLinkType(RpcSimilarDomain.LINK_TYPE.valueOf(linkType.name()))
                        .build());
LiveCaptureClient.java:

@@ -5,6 +5,7 @@ import com.google.inject.Singleton;
import nu.marginalia.api.livecapture.LiveCaptureApiGrpc.LiveCaptureApiBlockingStub;
import nu.marginalia.service.client.GrpcChannelPoolFactory;
import nu.marginalia.service.client.GrpcSingleNodeChannelPool;
+import nu.marginalia.service.client.ServiceNotAvailableException;
import nu.marginalia.service.discovery.property.ServiceKey;
import nu.marginalia.service.discovery.property.ServicePartition;
import org.slf4j.Logger;

@@ -29,6 +30,9 @@ public class LiveCaptureClient {
            channelPool.call(LiveCaptureApiBlockingStub::requestScreengrab)
                    .run(RpcDomainId.newBuilder().setDomainId(domainId).build());
        }
+       catch (ServiceNotAvailableException e) {
+           logger.info("requestScreengrab() failed since the service is not available");
+       }
        catch (Exception e) {
            logger.error("API Exception", e);
        }
@@ -27,8 +27,9 @@ dependencies {
    implementation project(':code:processes:crawling-process:ft-content-type')

    implementation libs.jsoup
-   implementation libs.rssreader
+   implementation project(':third-party:rssreader')
    implementation libs.opencsv
    implementation libs.slop
+   implementation libs.sqlite
    implementation libs.bundles.slf4j
    implementation libs.commons.lang3
BrowserlessClient.java:

@@ -15,7 +15,9 @@ import java.util.Map;

/** Client for local browserless.io API */
public class BrowserlessClient implements AutoCloseable {
+
    private static final Logger logger = LoggerFactory.getLogger(BrowserlessClient.class);
+   private static final String BROWSERLESS_TOKEN = System.getProperty("live-capture.browserless-token", "BROWSERLESS_TOKEN");

    private final HttpClient httpClient = HttpClient.newBuilder()
            .version(HttpClient.Version.HTTP_1_1)

@@ -36,7 +38,7 @@ public class BrowserlessClient implements AutoCloseable {
        );

        var request = HttpRequest.newBuilder()
-               .uri(browserlessURI.resolve("/content"))
+               .uri(browserlessURI.resolve("/content?token="+BROWSERLESS_TOKEN))
                .method("POST", HttpRequest.BodyPublishers.ofString(
                        gson.toJson(requestData)
                ))

@@ -63,7 +65,7 @@ public class BrowserlessClient implements AutoCloseable {
        );

        var request = HttpRequest.newBuilder()
-               .uri(browserlessURI.resolve("/screenshot"))
+               .uri(browserlessURI.resolve("/screenshot?token="+BROWSERLESS_TOKEN))
                .method("POST", HttpRequest.BodyPublishers.ofString(
                        gson.toJson(requestData)
                ))
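The BROWSERLESS_TOKEN constant above is read once from a system property at class load; the fallback string "BROWSERLESS_TOKEN" is only a placeholder. A sketch of how the property feeds into the request URL (token value hypothetical):

```java
// Launched with: java -Dlive-capture.browserless-token=my-secret-token ...
String token = System.getProperty("live-capture.browserless-token", "BROWSERLESS_TOKEN");
// The client appends it as a query parameter, e.g. /screenshot?token=my-secret-token.
// Note the value is not URL-encoded here, so tokens containing reserved
// characters would need escaping.
```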
FeedItem.java:

@@ -1,6 +1,6 @@
package nu.marginalia.rss.model;

-import com.apptasticsoftware.rssreader.Item;
+import nu.marginalia.rss.svc.SimpleFeedParser;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jsoup.Jsoup;

@@ -18,37 +18,33 @@ public record FeedItem(String title,
    public static final int MAX_DESC_LENGTH = 255;
    public static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

-   public static FeedItem fromItem(Item item, boolean keepFragment) {
-       String title = item.getTitle().orElse("");
+   public static FeedItem fromItem(SimpleFeedParser.ItemData item, boolean keepFragment) {
+       String title = item.title();
        String date = getItemDate(item);
        String description = getItemDescription(item);
        String url;

-       if (keepFragment || item.getLink().isEmpty()) {
-           url = item.getLink().orElse("");
+       if (keepFragment) {
+           url = item.url();
        }
        else {
            try {
-               String link = item.getLink().get();
+               String link = item.url();
                var linkUri = new URI(link);
                var cleanUri = new URI(linkUri.getScheme(), linkUri.getAuthority(), linkUri.getPath(), linkUri.getQuery(), null);
                url = cleanUri.toString();
            }
            catch (Exception e) {
                // fallback to original link if we can't clean it, this is not a very important step
-               url = item.getLink().get();
+               url = item.url();
            }
        }

        return new FeedItem(title, date, description, url);
    }

-   private static String getItemDescription(Item item) {
-       Optional<String> description = item.getDescription();
-       if (description.isEmpty())
-           return "";
-
-       String rawDescription = description.get();
+   private static String getItemDescription(SimpleFeedParser.ItemData item) {
+       String rawDescription = item.description();
        if (rawDescription.indexOf('<') >= 0) {
            rawDescription = Jsoup.parseBodyFragment(rawDescription).text();
        }

@@ -58,15 +54,18 @@ public record FeedItem(String title,

    // e.g. http://fabiensanglard.net/rss.xml does dates like this:  1 Apr 2021 00:00:00 +0000
    private static final DateTimeFormatter extraFormatter = DateTimeFormatter.ofPattern("d MMM yyyy HH:mm:ss Z");
-   private static String getItemDate(Item item) {
+   private static String getItemDate(SimpleFeedParser.ItemData item) {
        Optional<ZonedDateTime> zonedDateTime = Optional.empty();
        try {
            zonedDateTime = item.getPubDateZonedDateTime();
        }
        catch (Exception e) {
-           zonedDateTime = item.getPubDate()
-                   .map(extraFormatter::parse)
-                   .map(ZonedDateTime::from);
+           try {
+               zonedDateTime = Optional.of(ZonedDateTime.from(extraFormatter.parse(item.pubDate())));
+           }
+           catch (Exception e2) {
+               // ignore
+           }
        }

        return zonedDateTime.map(date -> date.format(DATE_FORMAT)).orElse("");
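The fallback branch in getItemDate() handles feeds whose pubDate doesn't parse with the primary parser, such as the fabiensanglard.net example in the comment above. A self-contained check of that path (the pattern is locale-sensitive; this assumes an English-language default locale):

```java
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

class FeedDateDemo {
    public static void main(String[] args) {
        DateTimeFormatter extraFormatter = DateTimeFormatter.ofPattern("d MMM yyyy HH:mm:ss Z");
        ZonedDateTime zdt = ZonedDateTime.from(extraFormatter.parse("1 Apr 2021 00:00:00 +0000"));
        System.out.println(zdt); // 2021-04-01T00:00Z
    }
}
```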
@@ -1,7 +1,5 @@
|
||||
package nu.marginalia.rss.svc;
|
||||
|
||||
import com.apptasticsoftware.rssreader.Item;
|
||||
import com.apptasticsoftware.rssreader.RssReader;
|
||||
import com.google.inject.Inject;
|
||||
import com.opencsv.CSVReader;
|
||||
import nu.marginalia.WmsaHome;
|
||||
@@ -20,7 +18,6 @@ import nu.marginalia.storage.FileStorageService;
|
||||
import nu.marginalia.storage.model.FileStorage;
|
||||
import nu.marginalia.storage.model.FileStorageType;
|
||||
import nu.marginalia.util.SimpleBlockingThreadPool;
|
||||
import org.apache.commons.io.input.BOMInputStream;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@@ -32,7 +29,6 @@ import java.net.URISyntaxException;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.sql.SQLException;
|
||||
import java.time.*;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
@@ -48,8 +44,6 @@ public class FeedFetcherService {
|
||||
private static final int MAX_FEED_ITEMS = 10;
|
||||
private static final Logger logger = LoggerFactory.getLogger(FeedFetcherService.class);
|
||||
|
||||
private final RssReader rssReader = new RssReader();
|
||||
|
||||
private final FeedDb feedDb;
|
||||
private final FileStorageService fileStorageService;
|
||||
private final NodeConfigurationService nodeConfigurationService;
|
||||
@@ -72,17 +66,6 @@ public class FeedFetcherService {
|
||||
this.nodeConfigurationService = nodeConfigurationService;
|
||||
this.serviceHeartbeat = serviceHeartbeat;
|
||||
this.executorClient = executorClient;
|
||||
|
||||
|
||||
// Add support for some alternate date tags for atom
|
||||
rssReader.addItemExtension("issued", this::setDateFallback);
|
||||
rssReader.addItemExtension("created", this::setDateFallback);
|
||||
}
|
||||
|
||||
private void setDateFallback(Item item, String value) {
|
||||
if (item.getPubDate().isEmpty()) {
|
||||
item.setPubDate(value);
|
||||
}
|
||||
}
|
||||
|
||||
public enum UpdateMode {
|
||||
@@ -96,6 +79,7 @@ public class FeedFetcherService {
|
||||
throw new IllegalStateException("Already updating feeds, refusing to start another update");
|
||||
}
|
||||
|
||||
|
||||
try (FeedDbWriter writer = feedDb.createWriter();
|
||||
HttpClient client = HttpClient.newBuilder()
|
||||
.connectTimeout(Duration.ofSeconds(15))
|
||||
@@ -103,6 +87,7 @@ public class FeedFetcherService {
|
||||
.followRedirects(HttpClient.Redirect.NORMAL)
|
||||
.version(HttpClient.Version.HTTP_2)
|
||||
.build();
|
||||
FeedJournal feedJournal = FeedJournal.create();
|
||||
var heartbeat = serviceHeartbeat.createServiceAdHocTaskHeartbeat("Update Rss Feeds")
|
||||
) {
|
||||
updating = true;
|
||||
@@ -155,6 +140,8 @@ public class FeedFetcherService {
|
||||
case FetchResult.Success(String value, String etag) -> {
|
||||
writer.saveEtag(feed.domain(), etag);
|
||||
writer.saveFeed(parseFeed(value, feed));
|
||||
|
||||
feedJournal.record(feed.feedUrl(), value);
|
||||
}
|
||||
case FetchResult.NotModified() -> {
|
||||
writer.saveEtag(feed.domain(), ifNoneMatchTag);
|
||||
@@ -367,12 +354,7 @@ public class FeedFetcherService {
|
||||
|
||||
public FeedItems parseFeed(String feedData, FeedDefinition definition) {
|
||||
try {
|
||||
feedData = sanitizeEntities(feedData);
|
||||
|
||||
List<Item> rawItems = rssReader.read(
|
||||
// Massage the data to maximize the possibility of the flaky XML parser consuming it
|
||||
new BOMInputStream(new ByteArrayInputStream(feedData.trim().getBytes(StandardCharsets.UTF_8)), false)
|
||||
).toList();
|
||||
List<SimpleFeedParser.ItemData> rawItems = SimpleFeedParser.parse(feedData);
|
||||
|
||||
boolean keepUriFragment = rawItems.size() < 2 || areFragmentsDisparate(rawItems);
|
||||
|
||||
@@ -395,32 +377,6 @@ public class FeedFetcherService {
|
||||
}
|
||||
}
|
||||
|
||||
    private static final Map<String, String> HTML_ENTITIES = Map.of(
            "&raquo;", "»",
            "&laquo;", "«",
            "&mdash;", "--",
            "&ndash;", "-",
            "&rsquo;", "'",
            "&lsquo;", "'",
            "&nbsp;", ""
    );

    /** The XML parser will blow up if you insert HTML entities in the feed XML,
     * which is unfortunately relatively common. Replace them as far as is possible
     * with their corresponding characters
     */
    static String sanitizeEntities(String feedData) {
        String result = feedData;
        for (Map.Entry<String, String> entry : HTML_ENTITIES.entrySet()) {
            result = result.replace(entry.getKey(), entry.getValue());
        }

        // Handle lone ampersands not part of a recognized XML entity
        result = result.replaceAll("&(?!(amp|lt|gt|apos|quot);)", "&amp;");

        return result;
    }

    /** Decide whether to keep URI fragments in the feed items.
     * <p></p>
     * We keep fragments if there are multiple different fragments in the items.
@@ -428,16 +384,16 @@
     * @param items The items to check
     * @return True if we should keep the fragments, false otherwise
     */
    private boolean areFragmentsDisparate(List<Item> items) {
    private boolean areFragmentsDisparate(List<SimpleFeedParser.ItemData> items) {
        Set<String> seenFragments = new HashSet<>();

        try {
            for (var item : items) {
                if (item.getLink().isEmpty()) {
                if (item.url().isBlank()) {
                    continue;
                }

                var link = item.getLink().get();
                var link = item.url();
                if (!link.contains("#")) {
                    continue;
                }

@@ -0,0 +1,76 @@
package nu.marginalia.rss.svc;

import nu.marginalia.WmsaHome;
import nu.marginalia.slop.SlopTable;
import nu.marginalia.slop.column.string.StringColumn;
import nu.marginalia.slop.desc.StorageType;
import org.apache.commons.io.FileUtils;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.BiConsumer;

/** Utility for recording fetched feeds to a journal, useful in debugging feed parser issues.
 */
public interface FeedJournal extends AutoCloseable {
    StringColumn urlColumn = new StringColumn("url");
    StringColumn contentsColumn = new StringColumn("contents", StandardCharsets.UTF_8, StorageType.ZSTD);

    void record(String url, String contents) throws IOException;
    void close() throws IOException;

    static FeedJournal create() throws IOException {
        if (Boolean.getBoolean("feedFetcher.persistJournal")) {
            Path journalPath = WmsaHome.getDataPath().resolve("feed-journal");
            if (Files.isDirectory(journalPath)) {
                FileUtils.deleteDirectory(journalPath.toFile());
            }
            Files.createDirectories(journalPath);
            return new RecordingFeedJournal(journalPath);
        }
        else {
            return new NoOpFeedJournal();
        }
    }

    class NoOpFeedJournal implements FeedJournal {
        @Override
        public void record(String url, String contents) {}

        @Override
        public void close() {}
    }

    class RecordingFeedJournal extends SlopTable implements FeedJournal {

        private final StringColumn.Writer urlWriter;
        private final StringColumn.Writer contentsWriter;

        public RecordingFeedJournal(Path path) throws IOException {
            super(path, SlopTable.getNumPages(path, FeedJournal.urlColumn));

            urlWriter = urlColumn.create(this);
            contentsWriter = contentsColumn.create(this);
        }

        public synchronized void record(String url, String contents) throws IOException {
            urlWriter.put(url);
            contentsWriter.put(contents);
        }
    }

    static void replay(Path journalPath, BiConsumer<String, String> urlAndContent) throws IOException {
        try (SlopTable table = new SlopTable(journalPath)) {
            final StringColumn.Reader urlReader = urlColumn.open(table);
            final StringColumn.Reader contentsReader = contentsColumn.open(table);

            while (urlReader.hasRemaining()) {
                urlAndContent.accept(urlReader.get(), contentsReader.get());
            }
        }
    }
}

@@ -0,0 +1,94 @@
package nu.marginalia.rss.svc;

import com.apptasticsoftware.rssreader.DateTimeParser;
import com.apptasticsoftware.rssreader.util.Default;
import org.jsoup.Jsoup;
import org.jsoup.parser.Parser;

import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

public class SimpleFeedParser {

    private static final DateTimeParser dateTimeParser = Default.getDateTimeParser();

    public record ItemData (
            String title,
            String description,
            String url,
            String pubDate
    ) {
        public boolean isWellFormed() {
            return title != null && !title.isBlank() &&
                    description != null && !description.isBlank() &&
                    url != null && !url.isBlank() &&
                    pubDate != null && !pubDate.isBlank();
        }

        public Optional<ZonedDateTime> getPubDateZonedDateTime() {
            try {
                return Optional.ofNullable(dateTimeParser.parse(pubDate()));
            }
            catch (Exception e) {
                return Optional.empty();
            }
        }

    }

    public static List<ItemData> parse(String content) {
        var doc = Jsoup.parse(content, Parser.xmlParser());
        List<ItemData> ret = new ArrayList<>();

        doc.select("item, entry").forEach(element -> {
            String link = "";
            String title = "";
            String description = "";
            String pubDate = "";

            for (String attr : List.of("title", "dc:title")) {
                if (!title.isBlank())
                    break;
                var tag = element.getElementsByTag(attr).first();
                if (tag != null) {
                    title = tag.text();
                }
            }

            for (String attr : List.of("title", "summary", "content", "description", "dc:description")) {
                if (!description.isBlank())
                    break;
                var tag = element.getElementsByTag(attr).first();
                if (tag != null) {
                    description = tag.text();
                }
            }

            for (String attr : List.of("pubDate", "published", "updated", "issued", "created", "dc:date")) {
                if (!pubDate.isBlank())
                    break;
                var tag = element.getElementsByTag(attr).first();
                if (tag != null) {
                    pubDate = tag.text();
                }
            }

            for (String attr : List.of("link", "url")) {
                if (!link.isBlank())
                    break;
                var tag = element.getElementsByTag(attr).first();
                if (tag != null) {
                    link = tag.text();
                }
            }

            ret.add(new ItemData(title, description, link, pubDate));
        });

        return ret;
    }

}

code/functions/live-capture/test-resources/nlnet.atom (new file, 27 lines)
@@ -0,0 +1,27 @@
<feed xmlns="http://www.w3.org/2005/Atom" xml:base="https://nlnet.nl">
  <title type="text">NLnet news</title>
  <updated>2025-01-01T00:00:00Z</updated>
  <id>https://nlnet.nl/feed.atom</id>
  <link rel="self" type="application/atom+xml" href="https://nlnet.nl/feed.atom"/>
  <entry>
    <id>https://nlnet.nl/news/2025/20250101-announcing-grantees-June-call.html</id>
    <author>
      <name>NLnet</name>
    </author>
    <title type="xhtml">
      <div xmlns="http://www.w3.org/1999/xhtml">50 Free and Open Source Projects Selected for NGI Zero grants</div>
    </title>
    <link href="/news/2025/20250101-announcing-grantees-June-call.html"/>
    <updated>2025-01-01T00:00:00Z</updated>
    <content type="xhtml">
      <div xmlns="http://www.w3.org/1999/xhtml">
        <p class="paralead">Happy 2025 everyone! On this first day of the fresh new year we are happy to announce 50 project teams were selected to receive NGI Zero grants. We are welcoming projects from 18 countries involving people and organisations of various types: individuals, associations, small and medium enterprises, foundations, universities, and informal collectives. The new projects are all across the different layers of the NGI technology stack: from trustworthy open hardware to services &amp; applications which provide autonomy for end-users.</p>
        <p>The 50 free and open source projects were selected across two funds. 19 teams will receive grants from the <a href="/commonsfund/">NGI Zero Commons Fund</a>, a broadly themed fund that supports people working on reclaiming the public nature of the internet. The other 31 projects will work within <a href="/core/">NGI Zero Core</a> which focuses on strengthening the open internet architecture. Both funds offer financial and practical support. The latter consisting of <a href="/NGI0/services/">support services</a> such as accessibility and security audits, advice on license compliance, help with testing, documentation or UX design.</p>
        <h2>If you applied for a grant</h2>
        <p>This is the selection for the <a href="https://nlnet.nl/news/2024/20240401-call.html">June call</a>. We always inform <em>all</em> applicants about the outcome of the review ahead of the public announcement, if the are selected or not. If you have not heard anything, you probably applied to a later call that is still under review. You can see which call you applied to by checking the application number assigned to the project when you applied. The second number in the sequence refers to the month of the call, so 06 in the case of the June call. (It should not happen, but if you did apply to the June call and did not hear anything, do contact us.)</p>
        <h2>Meet the new projects!</h2>
      </div>
    </content>
  </entry>

</feed>

@@ -2,16 +2,21 @@ package nu.marginalia.livecapture;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;

import java.net.URI;
import java.util.Map;

@Testcontainers
@Tag("slow")
public class BrowserlessClientTest {
    static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome")).withExposedPorts(3000);
    static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome"))
            .withEnv(Map.of("TOKEN", "BROWSERLESS_TOKEN"))
            .withExposedPorts(3000);

    @BeforeAll
    public static void setup() {

@@ -1,26 +0,0 @@
package nu.marginalia.rss.svc;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

public class TestXmlSanitization {

    @Test
    public void testPreservedEntities() {
        Assertions.assertEquals("&amp;", FeedFetcherService.sanitizeEntities("&amp;"));
        Assertions.assertEquals("&lt;", FeedFetcherService.sanitizeEntities("&lt;"));
        Assertions.assertEquals("&gt;", FeedFetcherService.sanitizeEntities("&gt;"));
        Assertions.assertEquals("&quot;", FeedFetcherService.sanitizeEntities("&quot;"));
        Assertions.assertEquals("&apos;", FeedFetcherService.sanitizeEntities("&apos;"));
    }

    @Test
    public void testStrayAmpersand() {
        Assertions.assertEquals("Bed &amp; Breakfast", FeedFetcherService.sanitizeEntities("Bed & Breakfast"));
    }

    @Test
    public void testTranslatedHtmlEntity() {
        Assertions.assertEquals("Foo -- Bar", FeedFetcherService.sanitizeEntities("Foo &mdash; Bar"));
    }
}

@@ -7,4 +7,8 @@ public record DictionaryResponse(String word, List<DictionaryEntry> entries) {
        this.word = word;
        this.entries = entries.stream().toList(); // Make an immutable copy
    }

    public boolean hasEntries() {
        return !entries.isEmpty();
    }
}

@@ -2,9 +2,6 @@ package nu.marginalia.api.searchquery;

import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
import nu.marginalia.api.searchquery.model.query.SearchQuery;
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.index.query.limit.SpecificationLimit;
import nu.marginalia.index.query.limit.SpecificationLimitType;

@@ -27,37 +24,19 @@ public class IndexProtobufCodec {
                .build();
    }

    public static QueryLimits convertQueryLimits(RpcQueryLimits queryLimits) {
        return new QueryLimits(
                queryLimits.getResultsByDomain(),
                queryLimits.getResultsTotal(),
                queryLimits.getTimeoutMs(),
                queryLimits.getFetchSize()
        );
    }

    public static RpcQueryLimits convertQueryLimits(QueryLimits queryLimits) {
        return RpcQueryLimits.newBuilder()
                .setResultsByDomain(queryLimits.resultsByDomain())
                .setResultsTotal(queryLimits.resultsTotal())
                .setTimeoutMs(queryLimits.timeoutMs())
                .setFetchSize(queryLimits.fetchSize())
                .build();
    }

    public static SearchQuery convertRpcQuery(RpcQuery query) {
        List<SearchPhraseConstraint> phraeConstraints = new ArrayList<>();
        List<SearchPhraseConstraint> phraseConstraints = new ArrayList<>();

        for (int j = 0; j < query.getPhrasesCount(); j++) {
            var coh = query.getPhrases(j);
            if (coh.getType() == RpcPhrases.TYPE.OPTIONAL) {
                phraeConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
                phraseConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
            }
            else if (coh.getType() == RpcPhrases.TYPE.MANDATORY) {
                phraeConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
                phraseConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
            }
            else if (coh.getType() == RpcPhrases.TYPE.FULL) {
                phraeConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
                phraseConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
            }
            else {
                throw new IllegalArgumentException("Unknown phrase constraint type: " + coh.getType());
@@ -70,7 +49,7 @@ public class IndexProtobufCodec {
                query.getExcludeList(),
                query.getAdviceList(),
                query.getPriorityList(),
                phraeConstraints
                phraseConstraints
        );
    }

@@ -103,60 +82,4 @@ public class IndexProtobufCodec {
        return subqueryBuilder.build();
    }

    public static ResultRankingParameters convertRankingParameterss(RpcResultRankingParameters params) {
        if (params == null)
            return ResultRankingParameters.sensibleDefaults();

        return new ResultRankingParameters(
                new Bm25Parameters(params.getBm25K(), params.getBm25B()),
                params.getShortDocumentThreshold(),
                params.getShortDocumentPenalty(),
                params.getDomainRankBonus(),
                params.getQualityPenalty(),
                params.getShortSentenceThreshold(),
                params.getShortSentencePenalty(),
                params.getBm25Weight(),
                params.getTcfFirstPositionWeight(),
                params.getTcfVerbatimWeight(),
                params.getTcfProximityWeight(),
                ResultRankingParameters.TemporalBias.valueOf(params.getTemporalBias().getBias().name()),
                params.getTemporalBiasWeight(),
                params.getExportDebugData()
        );
    }

    public static RpcResultRankingParameters convertRankingParameterss(ResultRankingParameters rankingParams,
                                                                       RpcTemporalBias temporalBias)
    {
        if (rankingParams == null) {
            rankingParams = ResultRankingParameters.sensibleDefaults();
        }

        var builder = RpcResultRankingParameters.newBuilder()
                .setBm25B(rankingParams.bm25Params.b())
                .setBm25K(rankingParams.bm25Params.k())
                .setShortDocumentThreshold(rankingParams.shortDocumentThreshold)
                .setShortDocumentPenalty(rankingParams.shortDocumentPenalty)
                .setDomainRankBonus(rankingParams.domainRankBonus)
                .setQualityPenalty(rankingParams.qualityPenalty)
                .setShortSentenceThreshold(rankingParams.shortSentenceThreshold)
                .setShortSentencePenalty(rankingParams.shortSentencePenalty)
                .setBm25Weight(rankingParams.bm25Weight)
                .setTcfFirstPositionWeight(rankingParams.tcfFirstPosition)
                .setTcfProximityWeight(rankingParams.tcfProximity)
                .setTcfVerbatimWeight(rankingParams.tcfVerbatim)
                .setTemporalBiasWeight(rankingParams.temporalBiasWeight)
                .setExportDebugData(rankingParams.exportDebugData);

        if (temporalBias != null && temporalBias.getBias() != RpcTemporalBias.Bias.NONE) {
            builder.setTemporalBias(temporalBias);
        }
        else {
            builder.setTemporalBias(RpcTemporalBias.newBuilder()
                    .setBias(RpcTemporalBias.Bias.valueOf(rankingParams.temporalBias.name())));
        }

        return builder.build();
    }

}

@@ -9,10 +9,9 @@ import nu.marginalia.service.client.GrpcChannelPoolFactory;
import nu.marginalia.service.client.GrpcSingleNodeChannelPool;
import nu.marginalia.service.discovery.property.ServiceKey;
import nu.marginalia.service.discovery.property.ServicePartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.CheckReturnValue;
import java.time.Duration;

@Singleton
public class QueryClient {
@@ -24,13 +23,14 @@ public class QueryClient {

    private final GrpcSingleNodeChannelPool<QueryApiGrpc.QueryApiBlockingStub> queryApiPool;

    private final Logger logger = LoggerFactory.getLogger(getClass());

    @Inject
    public QueryClient(GrpcChannelPoolFactory channelPoolFactory) {
    public QueryClient(GrpcChannelPoolFactory channelPoolFactory) throws InterruptedException {
        this.queryApiPool = channelPoolFactory.createSingle(
                ServiceKey.forGrpcApi(QueryApiGrpc.class, ServicePartition.any()),
                QueryApiGrpc::newBlockingStub);

        // Hold up initialization until we have a downstream connection
        this.queryApiPool.awaitChannel(Duration.ofSeconds(5));
    }

    @CheckReturnValue

@@ -5,7 +5,7 @@ import nu.marginalia.api.searchquery.model.query.QueryParams;
import nu.marginalia.api.searchquery.model.query.QueryResponse;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.api.searchquery.model.results.SearchResultItem;
import nu.marginalia.api.searchquery.model.results.SearchResultKeywordScore;
import nu.marginalia.api.searchquery.model.results.debug.DebugFactor;
@@ -37,7 +37,7 @@ public class QueryProtobufCodec {
        builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
        builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));

        builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
        builder.setQueryLimits(query.specs.queryLimits);

        // Query strategy may be overridden by the query, but if not, use the one from the request
        if (query.specs.queryStrategy != null && query.specs.queryStrategy != QueryStrategy.AUTO)
@@ -45,9 +45,27 @@
        else
            builder.setQueryStrategy(request.getQueryStrategy());

        if (query.specs.rankingParams != null) {
            builder.setParameters(IndexProtobufCodec.convertRankingParameterss(query.specs.rankingParams, request.getTemporalBias()));
        if (request.getTemporalBias().getBias() != RpcTemporalBias.Bias.NONE) {
            if (query.specs.rankingParams != null) {
                builder.setParameters(
                        RpcResultRankingParameters.newBuilder(query.specs.rankingParams)
                                .setTemporalBias(request.getTemporalBias())
                                .build()
                );
            } else {
                builder.setParameters(
                        RpcResultRankingParameters.newBuilder(PrototypeRankingParameters.sensibleDefaults())
                                .setTemporalBias(request.getTemporalBias())
                                .build()
                );
            }
        } else if (query.specs.rankingParams != null) {
            builder.setParameters(query.specs.rankingParams);
        }
        // else {
        //   if we have no ranking params, we don't need to set them, the client check and use the default values
        //   so we don't need to send this huge object over the wire
        // }

        return builder.build();
    }
@@ -65,18 +83,13 @@
        builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
        builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));

        builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
        builder.setQueryLimits(query.specs.queryLimits);

        // Query strategy may be overridden by the query, but if not, use the one from the request
        builder.setQueryStrategy(query.specs.queryStrategy.name());

        if (query.specs.rankingParams != null) {
            builder.setParameters(IndexProtobufCodec.convertRankingParameterss(
                    query.specs.rankingParams,
                    RpcTemporalBias.newBuilder().setBias(
                                    RpcTemporalBias.Bias.NONE)
                            .build())
            );
            builder.setParameters(query.specs.rankingParams);
        }

        return builder.build();
@@ -95,10 +108,10 @@
                IndexProtobufCodec.convertSpecLimit(request.getSize()),
                IndexProtobufCodec.convertSpecLimit(request.getRank()),
                request.getDomainIdsList(),
                IndexProtobufCodec.convertQueryLimits(request.getQueryLimits()),
                request.getQueryLimits(),
                request.getSearchSetIdentifier(),
                QueryStrategy.valueOf(request.getQueryStrategy()),
                ResultRankingParameters.TemporalBias.valueOf(request.getTemporalBias().getBias().name()),
                RpcTemporalBias.Bias.valueOf(request.getTemporalBias().getBias().name()),
                request.getPagination().getPage()
        );
    }
@@ -294,9 +307,9 @@
                IndexProtobufCodec.convertSpecLimit(specs.getYear()),
                IndexProtobufCodec.convertSpecLimit(specs.getSize()),
                IndexProtobufCodec.convertSpecLimit(specs.getRank()),
                IndexProtobufCodec.convertQueryLimits(specs.getQueryLimits()),
                specs.getQueryLimits(),
                QueryStrategy.valueOf(specs.getQueryStrategy()),
                IndexProtobufCodec.convertRankingParameterss(specs.getParameters())
                specs.hasParameters() ? specs.getParameters() : null
        );
    }

@@ -307,7 +320,7 @@
                .addAllTacitExcludes(params.tacitExcludes())
                .addAllTacitPriority(params.tacitPriority())
                .setHumanQuery(params.humanQuery())
                .setQueryLimits(IndexProtobufCodec.convertQueryLimits(params.limits()))
                .setQueryLimits(params.limits())
                .setQuality(IndexProtobufCodec.convertSpecLimit(params.quality()))
                .setYear(IndexProtobufCodec.convertSpecLimit(params.year()))
                .setSize(IndexProtobufCodec.convertSpecLimit(params.size()))
@@ -319,7 +332,7 @@
                        .build())
                .setPagination(RpcQsQueryPagination.newBuilder()
                        .setPage(params.page())
                        .setPageSize(Math.min(100, params.limits().resultsTotal()))
                        .setPageSize(Math.min(100, params.limits().getResultsTotal()))
                        .build());

        if (params.nearDomain() != null)

@@ -1,7 +1,7 @@
package nu.marginalia.api.searchquery.model.query;

import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.RpcTemporalBias;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.query.limit.SpecificationLimit;

@@ -21,14 +21,14 @@ public record QueryParams(
        SpecificationLimit size,
        SpecificationLimit rank,
        List<Integer> domainIds,
        QueryLimits limits,
        RpcQueryLimits limits,
        String identifier,
        QueryStrategy queryStrategy,
        ResultRankingParameters.TemporalBias temporalBias,
        RpcTemporalBias.Bias temporalBias,
        int page
)
{
    public QueryParams(String query, QueryLimits limits, String identifier) {
    public QueryParams(String query, RpcQueryLimits limits, String identifier) {
        this(query, null,
                List.of(),
                List.of(),
@@ -42,7 +42,7 @@ public record QueryParams(
                limits,
                identifier,
                QueryStrategy.AUTO,
                ResultRankingParameters.TemporalBias.NONE,
                RpcTemporalBias.Bias.NONE,
                1 // page
        );
    }

@@ -1,10 +1,11 @@
package nu.marginalia.api.searchquery.model.query;

import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.query.limit.SpecificationLimit;

import javax.annotation.Nullable;
import java.util.List;

public class SearchSpecification {
@@ -24,11 +25,12 @@ public class SearchSpecification {
    public SpecificationLimit size;
    public SpecificationLimit rank;

    public final QueryLimits queryLimits;
    public final RpcQueryLimits queryLimits;

    public final QueryStrategy queryStrategy;

    public final ResultRankingParameters rankingParams;
    @Nullable
    public final RpcResultRankingParameters rankingParams;

    public SearchSpecification(SearchQuery query,
                               List<Integer> domains,
@@ -38,9 +40,9 @@ public class SearchSpecification {
                               SpecificationLimit year,
                               SpecificationLimit size,
                               SpecificationLimit rank,
                               QueryLimits queryLimits,
                               RpcQueryLimits queryLimits,
                               QueryStrategy queryStrategy,
                               ResultRankingParameters rankingParams)
                               @Nullable RpcResultRankingParameters rankingParams)
    {
        this.query = query;
        this.domains = domains;
@@ -91,7 +93,7 @@ public class SearchSpecification {
        return this.rank;
    }

    public QueryLimits getQueryLimits() {
    public RpcQueryLimits getQueryLimits() {
        return this.queryLimits;
    }

@@ -99,7 +101,7 @@ public class SearchSpecification {
        return this.queryStrategy;
    }

    public ResultRankingParameters getRankingParams() {
    public RpcResultRankingParameters getRankingParams() {
        return this.rankingParams;
    }

@@ -120,9 +122,9 @@ public class SearchSpecification {
        private boolean size$set;
        private SpecificationLimit rank$value;
        private boolean rank$set;
        private QueryLimits queryLimits;
        private RpcQueryLimits queryLimits;
        private QueryStrategy queryStrategy;
        private ResultRankingParameters rankingParams;
        private RpcResultRankingParameters rankingParams;

        SearchSpecificationBuilder() {
        }
@@ -171,7 +173,7 @@ public class SearchSpecification {
            return this;
        }

        public SearchSpecificationBuilder queryLimits(QueryLimits queryLimits) {
        public SearchSpecificationBuilder queryLimits(RpcQueryLimits queryLimits) {
            this.queryLimits = queryLimits;
            return this;
        }
@@ -181,7 +183,7 @@ public class SearchSpecification {
            return this;
        }

        public SearchSpecificationBuilder rankingParams(ResultRankingParameters rankingParams) {
        public SearchSpecificationBuilder rankingParams(RpcResultRankingParameters rankingParams) {
            this.rankingParams = rankingParams;
            return this;
        }

@@ -0,0 +1,33 @@
package nu.marginalia.api.searchquery.model.results;

import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.RpcTemporalBias;

public class PrototypeRankingParameters {

    /** These are the default ranking parameters that are used when no parameters are specified. */

    private static final RpcResultRankingParameters _sensibleDefaults = RpcResultRankingParameters.newBuilder()
            .setBm25B(0.5)
            .setBm25K(1.2)
            .setShortDocumentThreshold(2000)
            .setShortDocumentPenalty(2.)
            .setDomainRankBonus(1 / 100.)
            .setQualityPenalty(1 / 15.)
            .setShortSentenceThreshold(2)
            .setShortSentencePenalty(5)
            .setBm25Weight(1.)
            .setTcfVerbatimWeight(1.)
            .setTcfProximityWeight(1.)
            .setTcfFirstPositionWeight(5)
            .setTemporalBias(RpcTemporalBias.newBuilder().setBias(RpcTemporalBias.Bias.NONE))
            .setTemporalBiasWeight(5.0)
            .setExportDebugData(false)
            .setDisablePenalties(false)
            .build();

    public static RpcResultRankingParameters sensibleDefaults() {
        return _sensibleDefaults;
    }

}

@@ -1,12 +1,13 @@
package nu.marginalia.api.searchquery.model.results;

import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;

import java.util.BitSet;

public class ResultRankingContext {
    private final int docCount;
    public final ResultRankingParameters params;
    public final RpcResultRankingParameters params;

    public final BitSet regularMask;
@@ -21,7 +22,7 @@ public class ResultRankingContext {
    public final CqDataInt priorityCounts;

    public ResultRankingContext(int docCount,
                                ResultRankingParameters params,
                                RpcResultRankingParameters params,
                                BitSet ngramsMask,
                                BitSet regularMask,
                                CqDataInt fullCounts,

@@ -1,278 +0,0 @@
package nu.marginalia.api.searchquery.model.results;

import java.util.Objects;

public class ResultRankingParameters {

    /**
     * Tuning for BM25 when applied to full document matches
     */
    public final Bm25Parameters bm25Params;

    /**
     * Documents below this length are penalized
     */
    public int shortDocumentThreshold;

    public double shortDocumentPenalty;

    /**
     * Scaling factor associated with domain rank (unscaled rank value is 0-255; high is good)
     */
    public double domainRankBonus;

    /**
     * Scaling factor associated with document quality (unscaled rank value is 0-15; high is bad)
     */
    public double qualityPenalty;

    /**
     * Average sentence length values below this threshold are penalized, range [0-4), 2 or 3 is probably what you want
     */
    public int shortSentenceThreshold;

    /**
     * Magnitude of penalty for documents with low average sentence length
     */
    public double shortSentencePenalty;

    public double bm25Weight;
    public double tcfFirstPosition;
    public double tcfVerbatim;
    public double tcfProximity;

    public TemporalBias temporalBias;
    public double temporalBiasWeight;

    public boolean exportDebugData;

    public ResultRankingParameters(Bm25Parameters bm25Params, int shortDocumentThreshold, double shortDocumentPenalty, double domainRankBonus, double qualityPenalty, int shortSentenceThreshold, double shortSentencePenalty, double bm25Weight, double tcfFirstPosition, double tcfVerbatim, double tcfProximity, TemporalBias temporalBias, double temporalBiasWeight, boolean exportDebugData) {
        this.bm25Params = bm25Params;
        this.shortDocumentThreshold = shortDocumentThreshold;
        this.shortDocumentPenalty = shortDocumentPenalty;
        this.domainRankBonus = domainRankBonus;
        this.qualityPenalty = qualityPenalty;
        this.shortSentenceThreshold = shortSentenceThreshold;
        this.shortSentencePenalty = shortSentencePenalty;
        this.bm25Weight = bm25Weight;
        this.tcfFirstPosition = tcfFirstPosition;
        this.tcfVerbatim = tcfVerbatim;
        this.tcfProximity = tcfProximity;
        this.temporalBias = temporalBias;
        this.temporalBiasWeight = temporalBiasWeight;
        this.exportDebugData = exportDebugData;
    }

    public static ResultRankingParameters sensibleDefaults() {
        return builder()
                .bm25Params(new Bm25Parameters(1.2, 0.5))
                .shortDocumentThreshold(2000)
                .shortDocumentPenalty(2.)
                .domainRankBonus(1 / 100.)
                .qualityPenalty(1 / 15.)
                .shortSentenceThreshold(2)
                .shortSentencePenalty(5)
                .bm25Weight(1.)
                .tcfVerbatim(1.)
                .tcfProximity(1.)
                .tcfFirstPosition(5)
                .temporalBias(TemporalBias.NONE)
                .temporalBiasWeight(5.0)
                .exportDebugData(false)
                .build();
    }

    public static ResultRankingParametersBuilder builder() {
        return new ResultRankingParametersBuilder();
    }

    public Bm25Parameters getBm25Params() {
        return this.bm25Params;
    }

    public int getShortDocumentThreshold() {
        return this.shortDocumentThreshold;
    }

    public double getShortDocumentPenalty() {
        return this.shortDocumentPenalty;
    }

    public double getDomainRankBonus() {
        return this.domainRankBonus;
    }

    public double getQualityPenalty() {
        return this.qualityPenalty;
    }

    public int getShortSentenceThreshold() {
        return this.shortSentenceThreshold;
    }

    public double getShortSentencePenalty() {
        return this.shortSentencePenalty;
    }

    public double getBm25Weight() {
        return this.bm25Weight;
    }

    public double getTcfFirstPosition() {
        return this.tcfFirstPosition;
    }

    public double getTcfVerbatim() {
        return this.tcfVerbatim;
    }

    public double getTcfProximity() {
        return this.tcfProximity;
    }

    public TemporalBias getTemporalBias() {
        return this.temporalBias;
    }

    public double getTemporalBiasWeight() {
        return this.temporalBiasWeight;
    }

    public boolean isExportDebugData() {
        return this.exportDebugData;
    }

    @Override
    public final boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof ResultRankingParameters that)) return false;

        return shortDocumentThreshold == that.shortDocumentThreshold && Double.compare(shortDocumentPenalty, that.shortDocumentPenalty) == 0 && Double.compare(domainRankBonus, that.domainRankBonus) == 0 && Double.compare(qualityPenalty, that.qualityPenalty) == 0 && shortSentenceThreshold == that.shortSentenceThreshold && Double.compare(shortSentencePenalty, that.shortSentencePenalty) == 0 && Double.compare(bm25Weight, that.bm25Weight) == 0 && Double.compare(tcfFirstPosition, that.tcfFirstPosition) == 0 && Double.compare(tcfVerbatim, that.tcfVerbatim) == 0 && Double.compare(tcfProximity, that.tcfProximity) == 0 && Double.compare(temporalBiasWeight, that.temporalBiasWeight) == 0 && exportDebugData == that.exportDebugData && Objects.equals(bm25Params, that.bm25Params) && temporalBias == that.temporalBias;
    }

    @Override
    public int hashCode() {
        int result = Objects.hashCode(bm25Params);
        result = 31 * result + shortDocumentThreshold;
        result = 31 * result + Double.hashCode(shortDocumentPenalty);
        result = 31 * result + Double.hashCode(domainRankBonus);
        result = 31 * result + Double.hashCode(qualityPenalty);
        result = 31 * result + shortSentenceThreshold;
        result = 31 * result + Double.hashCode(shortSentencePenalty);
        result = 31 * result + Double.hashCode(bm25Weight);
        result = 31 * result + Double.hashCode(tcfFirstPosition);
        result = 31 * result + Double.hashCode(tcfVerbatim);
        result = 31 * result + Double.hashCode(tcfProximity);
        result = 31 * result + Objects.hashCode(temporalBias);
        result = 31 * result + Double.hashCode(temporalBiasWeight);
        result = 31 * result + Boolean.hashCode(exportDebugData);
        return result;
    }

    public String toString() {
        return "ResultRankingParameters(bm25Params=" + this.getBm25Params() + ", shortDocumentThreshold=" + this.getShortDocumentThreshold() + ", shortDocumentPenalty=" + this.getShortDocumentPenalty() + ", domainRankBonus=" + this.getDomainRankBonus() + ", qualityPenalty=" + this.getQualityPenalty() + ", shortSentenceThreshold=" + this.getShortSentenceThreshold() + ", shortSentencePenalty=" + this.getShortSentencePenalty() + ", bm25Weight=" + this.getBm25Weight() + ", tcfFirstPosition=" + this.getTcfFirstPosition() + ", tcfVerbatim=" + this.getTcfVerbatim() + ", tcfProximity=" + this.getTcfProximity() + ", temporalBias=" + this.getTemporalBias() + ", temporalBiasWeight=" + this.getTemporalBiasWeight() + ", exportDebugData=" + this.isExportDebugData() + ")";
    }

    public enum TemporalBias {
        RECENT, OLD, NONE
    }

    public static class ResultRankingParametersBuilder {
        private Bm25Parameters bm25Params;
        private int shortDocumentThreshold;
        private double shortDocumentPenalty;
        private double domainRankBonus;
        private double qualityPenalty;
        private int shortSentenceThreshold;
        private double shortSentencePenalty;
        private double bm25Weight;
        private double tcfFirstPosition;
        private double tcfVerbatim;
        private double tcfProximity;
        private TemporalBias temporalBias;
        private double temporalBiasWeight;
        private boolean exportDebugData;

        ResultRankingParametersBuilder() {
        }

        public ResultRankingParametersBuilder bm25Params(Bm25Parameters bm25Params) {
            this.bm25Params = bm25Params;
            return this;
        }

        public ResultRankingParametersBuilder shortDocumentThreshold(int shortDocumentThreshold) {
            this.shortDocumentThreshold = shortDocumentThreshold;
            return this;
        }

        public ResultRankingParametersBuilder shortDocumentPenalty(double shortDocumentPenalty) {
            this.shortDocumentPenalty = shortDocumentPenalty;
            return this;
        }

        public ResultRankingParametersBuilder domainRankBonus(double domainRankBonus) {
            this.domainRankBonus = domainRankBonus;
            return this;
        }

        public ResultRankingParametersBuilder qualityPenalty(double qualityPenalty) {
            this.qualityPenalty = qualityPenalty;
            return this;
        }

        public ResultRankingParametersBuilder shortSentenceThreshold(int shortSentenceThreshold) {
            this.shortSentenceThreshold = shortSentenceThreshold;
            return this;
        }

        public ResultRankingParametersBuilder shortSentencePenalty(double shortSentencePenalty) {
            this.shortSentencePenalty = shortSentencePenalty;
            return this;
        }

        public ResultRankingParametersBuilder bm25Weight(double bm25Weight) {
            this.bm25Weight = bm25Weight;
            return this;
        }

        public ResultRankingParametersBuilder tcfFirstPosition(double tcfFirstPosition) {
            this.tcfFirstPosition = tcfFirstPosition;
            return this;
        }

        public ResultRankingParametersBuilder tcfVerbatim(double tcfVerbatim) {
            this.tcfVerbatim = tcfVerbatim;
            return this;
        }

        public ResultRankingParametersBuilder tcfProximity(double tcfProximity) {
            this.tcfProximity = tcfProximity;
            return this;
        }

        public ResultRankingParametersBuilder temporalBias(TemporalBias temporalBias) {
            this.temporalBias = temporalBias;
            return this;
        }

        public ResultRankingParametersBuilder temporalBiasWeight(double temporalBiasWeight) {
            this.temporalBiasWeight = temporalBiasWeight;
            return this;
        }

        public ResultRankingParametersBuilder exportDebugData(boolean exportDebugData) {
            this.exportDebugData = exportDebugData;
            return this;
        }

        public ResultRankingParameters build() {
            return new ResultRankingParameters(this.bm25Params, this.shortDocumentThreshold, this.shortDocumentPenalty, this.domainRankBonus, this.qualityPenalty, this.shortSentenceThreshold, this.shortSentencePenalty, this.bm25Weight, this.tcfFirstPosition, this.tcfVerbatim, this.tcfProximity, this.temporalBias, this.temporalBiasWeight, this.exportDebugData);
        }

        public String toString() {
            return "ResultRankingParameters.ResultRankingParametersBuilder(bm25Params=" + this.bm25Params + ", shortDocumentThreshold=" + this.shortDocumentThreshold + ", shortDocumentPenalty=" + this.shortDocumentPenalty + ", domainRankBonus=" + this.domainRankBonus + ", qualityPenalty=" + this.qualityPenalty + ", shortSentenceThreshold=" + this.shortSentenceThreshold + ", shortSentencePenalty=" + this.shortSentencePenalty + ", bm25Weight=" + this.bm25Weight + ", tcfFirstPosition=" + this.tcfFirstPosition + ", tcfVerbatim=" + this.tcfVerbatim + ", tcfProximity=" + this.tcfProximity + ", temporalBias=" + this.temporalBias + ", temporalBiasWeight=" + this.temporalBiasWeight + ", exportDebugData=" + this.exportDebugData + ")";
        }
    }
}

@@ -162,6 +162,7 @@ message RpcResultRankingParameters {
    double temporalBiasWeight = 17;

    bool exportDebugData = 18;
    bool disablePenalties = 19;

}

@@ -3,8 +3,6 @@ package nu.marginalia.index.client;
import nu.marginalia.api.searchquery.IndexProtobufCodec;
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
import nu.marginalia.api.searchquery.model.query.SearchQuery;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.index.query.limit.SpecificationLimit;
import org.junit.jupiter.api.Test;

@@ -22,18 +20,6 @@ class IndexProtobufCodecTest {
        verifyIsIdentityTransformation(SpecificationLimit.lessThan(1), l -> IndexProtobufCodec.convertSpecLimit(IndexProtobufCodec.convertSpecLimit(l)));
    }

    @Test
    public void testRankingParameters() {
        verifyIsIdentityTransformation(ResultRankingParameters.sensibleDefaults(),
                p -> IndexProtobufCodec.convertRankingParameterss(IndexProtobufCodec.convertRankingParameterss(p, null)));
    }

    @Test
    public void testQueryLimits() {
        verifyIsIdentityTransformation(new QueryLimits(1,2,3,4),
                l -> IndexProtobufCodec.convertQueryLimits(IndexProtobufCodec.convertQueryLimits(l))
        );
    }
    @Test
    public void testSubqery() {
        verifyIsIdentityTransformation(new SearchQuery(

@@ -2,8 +2,9 @@ package nu.marginalia.functions.searchquery;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.model.query.*;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
import nu.marginalia.functions.searchquery.query_parser.QueryParser;
import nu.marginalia.functions.searchquery.query_parser.token.QueryToken;
@@ -36,7 +37,7 @@ public class QueryFactory {

    public ProcessedQuery createQuery(QueryParams params,
                                      @Nullable ResultRankingParameters rankingParams) {
                                      @Nullable RpcResultRankingParameters rankingParams) {
        final var query = params.humanQuery();

        if (query.length() > 1000) {
@@ -71,6 +72,17 @@ public class QueryFactory {

            String[] parts = StringUtils.split(str, '_');

            // Trim down tokens to match the behavior of the tokenizer used in indexing
            for (int i = 0; i < parts.length; i++) {
                String part = parts[i];

                if (part.endsWith("'s") && part.length() > 2) {
                    part = part.substring(0, part.length()-2);
                }

                parts[i] = part;
            }

            if (parts.length > 1) {
                // Require that the terms appear in sequence
                queryBuilder.phraseConstraint(SearchPhraseConstraint.mandatory(parts));
@@ -121,7 +133,9 @@ public class QueryFactory {
        var limits = params.limits();
        // Disable limits on number of results per domain if we're searching with a site:-type term
        if (domain != null) {
            limits = limits.forSingleDomain();
            limits = RpcQueryLimits.newBuilder(limits)
                    .setResultsByDomain(limits.getResultsTotal())
                    .build();
        }

        var expansion = queryExpansion.expandQuery(queryBuilder.searchTermsInclude);

@@ -9,7 +9,7 @@ import nu.marginalia.api.searchquery.*;
import nu.marginalia.api.searchquery.model.query.ProcessedQuery;
import nu.marginalia.api.searchquery.model.query.QueryParams;
import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.index.api.IndexClient;
import nu.marginalia.service.server.DiscoverableService;
import org.slf4j.Logger;
@@ -55,7 +55,7 @@ public class QueryGRPCService
                    .time(() -> {

                        var params = QueryProtobufCodec.convertRequest(request);
                        var query = queryFactory.createQuery(params, ResultRankingParameters.sensibleDefaults());
                        var query = queryFactory.createQuery(params, PrototypeRankingParameters.sensibleDefaults());

                        var indexRequest = QueryProtobufCodec.convertQuery(request, query);

@@ -102,7 +102,7 @@ public class QueryGRPCService
                                                             String originalQuery,
                                                             QueryParams params,
                                                             IndexClient.Pagination pagination,
                                                             ResultRankingParameters rankingParameters) {
                                                             RpcResultRankingParameters rankingParameters) {

        var query = queryFactory.createQuery(params, rankingParameters);
        IndexClient.AggregateQueryResponse response = indexClient.executeQueries(QueryProtobufCodec.convertQuery(originalQuery, query), pagination);

@@ -25,6 +25,7 @@ public class QueryExpansion {
            this::joinDashes,
            this::splitWordNum,
            this::joinTerms,
            this::categoryKeywords,
            this::ngramAll
    );

@@ -98,6 +99,24 @@ public class QueryExpansion {
        }
    }

    // Category keyword substitution, e.g. guitar wiki -> guitar generator:wiki
    public void categoryKeywords(QWordGraph graph) {

        for (var qw : graph) {

            // Ensure we only perform the substitution on the last word in the query
            if (!graph.getNextOriginal(qw).getFirst().isEnd()) {
                continue;
            }

            switch (qw.word()) {
                case "recipe", "recipes" -> graph.addVariant(qw, "category:food");
                case "forum" -> graph.addVariant(qw, "generator:forum");
                case "wiki" -> graph.addVariant(qw, "generator:wiki");
            }
        }
    }

    // Turn 'lawn chair' into 'lawnchair'
    public void joinTerms(QWordGraph graph) {
        QWord prev = null;

@@ -155,16 +155,25 @@ public class QueryParser {

        // Remove trailing punctuation
        int lastChar = str.charAt(str.length() - 1);
        if (":.,!?$'".indexOf(lastChar) >= 0)
            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 1), lt.displayStr()));
        if (":.,!?$'".indexOf(lastChar) >= 0) {
            str = str.substring(0, str.length() - 1);
            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
        }

        // Remove term elements that aren't indexed by the search engine
        if (str.endsWith("'s"))
            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 2), lt.displayStr()));
        if (str.endsWith("()"))
            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 2), lt.displayStr()));
        if (str.startsWith("$"))
            entity.replace(new QueryToken.LiteralTerm(str.substring(1), lt.displayStr()));
        if (str.endsWith("'s")) {
            str = str.substring(0, str.length() - 2);
            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
        }
        if (str.endsWith("()")) {
            str = str.substring(0, str.length() - 2);
            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
        }

        while (str.startsWith("$") || str.startsWith("_")) {
            str = str.substring(1);
            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
        }

        if (entity.isBlank()) {
            entity.remove();
@@ -224,9 +233,19 @@ public class QueryParser {
            entity.replace(new QueryToken.RankTerm(limit, str));
        } else if (str.startsWith("qs=")) {
            entity.replace(new QueryToken.QsTerm(str.substring(3)));
        } else if (str.contains(":")) {
        } else if (str.startsWith("site:")
                || str.startsWith("format:")
                || str.startsWith("file:")
                || str.startsWith("tld:")
                || str.startsWith("ip:")
                || str.startsWith("as:")
                || str.startsWith("asn:")
                || str.startsWith("generator:")
        )
        {
            entity.replace(new QueryToken.AdviceTerm(str, t.displayStr()));
        }

    }

    private static SpecificationLimit parseSpecificationLimit(String str) {

@@ -1,165 +0,0 @@
|
||||
package nu.marginalia.util.language;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import nu.marginalia.term_frequency_dict.TermFrequencyDict;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class EnglishDictionary {
|
||||
private final Set<String> englishWords = new HashSet<>();
|
||||
private final TermFrequencyDict tfDict;
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
|
||||
@Inject
|
||||
public EnglishDictionary(TermFrequencyDict tfDict) {
|
||||
this.tfDict = tfDict;
|
||||
        try (var resource = Objects.requireNonNull(ClassLoader.getSystemResourceAsStream("dictionary/en-words"),
                "Could not load word frequency table");
             var br = new BufferedReader(new InputStreamReader(resource))
        ) {
            for (;;) {
                String s = br.readLine();
                if (s == null) {
                    break;
                }
                englishWords.add(s.toLowerCase());
            }
        }
        catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    public boolean isWord(String word) {
        return englishWords.contains(word);
    }

    private static final Pattern ingPattern = Pattern.compile(".*(\\w)\\1ing$");

    public Collection<String> getWordVariants(String s) {
        var variants = findWordVariants(s);

        var ret = variants.stream()
                .filter(var -> tfDict.getTermFreq(var) > 100)
                .collect(Collectors.toList());

        if (s.equals("recipe") || s.equals("recipes")) {
            ret.add("category:food");
        }

        return ret;
    }

    public Collection<String> findWordVariants(String s) {
        int sl = s.length();

        if (sl < 2) {
            return Collections.emptyList();
        }
        if (s.endsWith("s")) {
            String a = s.substring(0, sl-1);
            String b = s + "es";
            if (isWord(a) && isWord(b)) {
                return List.of(a, b);
            }
            else if (isWord(a)) {
                return List.of(a);
            }
            else if (isWord(b)) {
                return List.of(b);
            }
        }
        if (s.endsWith("sm")) {
            String a = s.substring(0, sl-1)+"t";
            String b = s.substring(0, sl-1)+"ts";
            if (isWord(a) && isWord(b)) {
                return List.of(a, b);
            }
            else if (isWord(a)) {
                return List.of(a);
            }
            else if (isWord(b)) {
                return List.of(b);
            }
        }
        if (s.endsWith("st")) {
            String a = s.substring(0, sl-1)+"m";
            String b = s + "s";
            if (isWord(a) && isWord(b)) {
                return List.of(a, b);
            }
            else if (isWord(a)) {
                return List.of(a);
            }
            else if (isWord(b)) {
                return List.of(b);
            }
        }
        else if (ingPattern.matcher(s).matches() && sl > 4) { // humming, clapping
            var a = s.substring(0, sl-4);
            var b = s.substring(0, sl-3) + "ed";

            if (isWord(a) && isWord(b)) {
                return List.of(a, b);
            }
            else if (isWord(a)) {
                return List.of(a);
            }
            else if (isWord(b)) {
                return List.of(b);
            }
        }
        else {
            String a = s + "s";
            String b = ingForm(s);
            String c = s + "ed";

            if (isWord(a) && isWord(b) && isWord(c)) {
                return List.of(a, b, c);
            }
            else if (isWord(a) && isWord(b)) {
                return List.of(a, b);
            }
            else if (isWord(b) && isWord(c)) {
                return List.of(b, c);
            }
            else if (isWord(a) && isWord(c)) {
                return List.of(a, c);
            }
            else if (isWord(a)) {
                return List.of(a);
            }
            else if (isWord(b)) {
                return List.of(b);
            }
            else if (isWord(c)) {
                return List.of(c);
            }
        }

        return Collections.emptyList();
    }

    public String ingForm(String s) {
        if (s.endsWith("t") && !s.endsWith("tt")) {
            return s + "ting";
        }
        if (s.endsWith("n") && !s.endsWith("nn")) {
            return s + "ning";
        }
        if (s.endsWith("m") && !s.endsWith("mm")) {
            return s + "ming";
        }
        if (s.endsWith("r") && !s.endsWith("rr")) {
            return s + "ring";
        }
        return s + "ing";
    }
}
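For orientation, a few traced examples of what the variant logic above produces (a hypothetical usage sketch; the enclosing class and the `dict` instance are assumptions, since this hunk doesn't show them):

    dict.findWordVariants("bus");     // -> [buses]: "bu" fails the dictionary check, "buses" passes
    dict.findWordVariants("theist");  // -> [theism, theists], via the "-st"/"-sm" pairing
    dict.ingForm("run");              // -> "running": a single trailing 'n' is doubled before "ing"
    dict.ingForm("walk");             // -> "walking": default case, plain "ing"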
@@ -0,0 +1,32 @@
package nu.marginalia.functions.searchquery.query_parser;

import nu.marginalia.functions.searchquery.query_parser.token.QueryToken;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.List;

class QueryParserTest {

    @Test
    // https://github.com/MarginaliaSearch/MarginaliaSearch/issues/140
    void parse__builtin_ffs() {
        QueryParser parser = new QueryParser();
        var tokens = parser.parse("__builtin_ffs");
        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("builtin_ffs", "__builtin_ffs")), tokens);
    }

    @Test
    void trailingParens() {
        QueryParser parser = new QueryParser();
        var tokens = parser.parse("strcpy()");
        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("strcpy", "strcpy()")), tokens);
    }

    @Test
    void trailingQuote() {
        QueryParser parser = new QueryParser();
        var tokens = parser.parse("bob's");
        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("bob", "bob's")), tokens);
    }
}
@@ -1,17 +1,18 @@
package nu.marginalia.query.svc;

import nu.marginalia.WmsaHome;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.RpcTemporalBias;
import nu.marginalia.api.searchquery.model.query.QueryParams;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.functions.searchquery.QueryFactory;
import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.query.limit.SpecificationLimit;
import nu.marginalia.index.query.limit.SpecificationLimitType;
import nu.marginalia.segmentation.NgramLexicon;
import nu.marginalia.term_frequency_dict.TermFrequencyDict;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

@@ -48,10 +49,15 @@ public class QueryFactoryTest {
                SpecificationLimit.none(),
                SpecificationLimit.none(),
                null,
                new QueryLimits(100, 100, 100, 100),
                RpcQueryLimits.newBuilder()
                        .setResultsTotal(100)
                        .setResultsByDomain(100)
                        .setTimeoutMs(100)
                        .setFetchSize(100)
                        .build(),
                "NONE",
                QueryStrategy.AUTO,
                ResultRankingParameters.TemporalBias.NONE,
                RpcTemporalBias.Bias.NONE,
                0), null).specs;
    }

@@ -207,6 +213,34 @@ public class QueryFactoryTest {
        System.out.println(subquery);
    }

    @Test
    public void testCplusPlus() {
        var subquery = parseAndGetSpecs("std::vector::push_back vector");
        System.out.println(subquery);
    }

    @Test
    public void testQuotedApostrophe() {
        var subquery = parseAndGetSpecs("\"bob's cars\"");

        System.out.println(subquery);

        Assertions.assertTrue(subquery.query.compiledQuery.contains(" bob "));
        Assertions.assertFalse(subquery.query.compiledQuery.contains(" bob's "));
        Assertions.assertEquals("\"bob's cars\"", subquery.humanQuery);
    }

    @Test
    public void testExpansion9() {
        var subquery = parseAndGetSpecs("pie recipe");

        Assertions.assertTrue(subquery.query.compiledQuery.contains(" category:food "));

        subquery = parseAndGetSpecs("recipe pie");

        Assertions.assertFalse(subquery.query.compiledQuery.contains(" category:food "));
    }

    @Test
    public void testParsing() {
        var subquery = parseAndGetSpecs("strlen()");
@@ -16,20 +16,19 @@ import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static java.lang.Math.clamp;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;

@Singleton
public class IndexClient {
    private static final Logger logger = LoggerFactory.getLogger(IndexClient.class);
    private final GrpcMultiNodeChannelPool<IndexApiGrpc.IndexApiBlockingStub> channelPool;
    private final DomainBlacklistImpl blacklist;
    private static final ExecutorService executor = Executors.newVirtualThreadPerTaskExecutor();
    private static final ExecutorService executor = Executors.newCachedThreadPool();

    @Inject
    public IndexClient(GrpcChannelPoolFactory channelPoolFactory, DomainBlacklistImpl blacklist) {

@@ -51,40 +50,37 @@ public class IndexClient {

    /** Execute a query on the index partitions and return the combined results. */
    public AggregateQueryResponse executeQueries(RpcIndexQuery indexRequest, Pagination pagination) {
        List<CompletableFuture<Iterator<RpcDecoratedResultItem>>> futures =
                channelPool.call(IndexApiGrpc.IndexApiBlockingStub::query)
                        .async(executor)
                        .runEach(indexRequest);

        final int requestedMaxResults = indexRequest.getQueryLimits().getResultsTotal();
        final int resultsUpperBound = requestedMaxResults * channelPool.getNumNodes();

        List<RpcDecoratedResultItem> results = new ArrayList<>(resultsUpperBound);
        AtomicInteger totalNumResults = new AtomicInteger(0);

        for (var future : futures) {
            try {
                future.get().forEachRemaining(results::add);
            }
            catch (Exception e) {
                logger.error("Downstream exception", e);
            }
        }
        List<RpcDecoratedResultItem> results =
                channelPool.call(IndexApiGrpc.IndexApiBlockingStub::query)
                        .async(executor)
                        .runEach(indexRequest)
                        .stream()
                        .map(future -> future.thenApply(iterator -> {
                            List<RpcDecoratedResultItem> ret = new ArrayList<>(requestedMaxResults);
                            iterator.forEachRemaining(ret::add);
                            totalNumResults.addAndGet(ret.size());
                            return ret;
                        }))
                        .mapMulti((CompletableFuture<List<RpcDecoratedResultItem>> fut, Consumer<List<RpcDecoratedResultItem>> c) -> {
                            try {
                                c.accept(fut.join());
                            } catch (Exception e) {
                                logger.error("Error while fetching results", e);
                            }
                        })
                        .flatMap(List::stream)
                        .filter(item -> !isBlacklisted(item))
                        .sorted(comparator)
                        .skip(Math.max(0, (pagination.page - 1) * pagination.pageSize))
                        .limit(pagination.pageSize)
                        .toList();

        // Sort the results by ranking score and remove blacklisted domains
        results.sort(comparator);
        results.removeIf(this::isBlacklisted);

        int numReceivedResults = results.size();

        // pagination is typically 1-indexed, so we need to adjust the start and end indices
        int indexStart = (pagination.page - 1) * pagination.pageSize;
        int indexEnd = (pagination.page) * pagination.pageSize;

        results = results.subList(
                clamp(indexStart, 0, Math.max(0, results.size() - 1)), // from is inclusive, so subtract 1 from size()
                clamp(indexEnd, 0, results.size()));

        return new AggregateQueryResponse(results, pagination.page(), numReceivedResults);
        return new AggregateQueryResponse(results, pagination.page(), totalNumResults.get());
    }

    private boolean isBlacklisted(RpcDecoratedResultItem item) {
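As an aside, the 1-indexed skip/limit paging in the stream pipeline above is easy to get wrong at the boundaries. A minimal self-contained sketch of the same arithmetic (the demo class name is illustrative, not part of this changeset):

    import java.util.List;
    import java.util.stream.IntStream;

    public class PaginationDemo {
        // Pages are 1-indexed, so page p with size n skips (p-1)*n items and takes n.
        static List<Integer> page(List<Integer> results, int page, int pageSize) {
            return results.stream()
                    .skip(Math.max(0, (long) (page - 1) * pageSize))
                    .limit(pageSize)
                    .toList();
        }

        public static void main(String[] args) {
            var all = IntStream.rangeClosed(1, 25).boxed().toList();
            System.out.println(page(all, 1, 10)); // [1..10]
            System.out.println(page(all, 3, 10)); // [21..25] -- a short final page
        }
    }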
@@ -10,12 +10,12 @@ import it.unimi.dsi.fastutil.longs.LongArrayList;
import nu.marginalia.api.searchquery.IndexApiGrpc;
import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
import nu.marginalia.api.searchquery.RpcIndexQuery;
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.array.page.LongQueryBuffer;
import nu.marginalia.index.index.StatefulIndex;
import nu.marginalia.index.model.SearchParameters;

@@ -211,7 +211,7 @@ public class IndexGrpcService

    /** This class is responsible for ranking the results and adding the best results to the
     * resultHeap, which depending on the state of the indexLookup threads may or may not block
     */
    private ResultRankingContext createRankingContext(ResultRankingParameters rankingParams,
    private ResultRankingContext createRankingContext(RpcResultRankingParameters rankingParams,
                                                      CompiledQuery<String> compiledQuery,
                                                      CompiledQueryLong compiledQueryIds)
    {
@@ -2,12 +2,13 @@ package nu.marginalia.index.model;

import nu.marginalia.api.searchquery.IndexProtobufCodec;
import nu.marginalia.api.searchquery.RpcIndexQuery;
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryParser;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.query.SearchQuery;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.index.query.IndexSearchBudget;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.searchset.SearchSet;

@@ -23,7 +24,7 @@ public class SearchParameters {
    public final IndexSearchBudget budget;
    public final SearchQuery query;
    public final QueryParams queryParams;
    public final ResultRankingParameters rankingParams;
    public final RpcResultRankingParameters rankingParams;

    public final int limitByDomain;
    public final int limitTotal;

@@ -41,11 +42,11 @@ public class SearchParameters {
    public SearchParameters(SearchSpecification specsSet, SearchSet searchSet) {
        var limits = specsSet.queryLimits;

        this.fetchSize = limits.fetchSize();
        this.budget = new IndexSearchBudget(limits.timeoutMs());
        this.fetchSize = limits.getFetchSize();
        this.budget = new IndexSearchBudget(limits.getTimeoutMs());
        this.query = specsSet.query;
        this.limitByDomain = limits.resultsByDomain();
        this.limitTotal = limits.resultsTotal();
        this.limitByDomain = limits.getResultsByDomain();
        this.limitTotal = limits.getResultsTotal();

        queryParams = new QueryParams(
                specsSet.quality,

@@ -62,17 +63,17 @@ public class SearchParameters {
    }

    public SearchParameters(RpcIndexQuery request, SearchSet searchSet) {
        var limits = IndexProtobufCodec.convertQueryLimits(request.getQueryLimits());
        var limits = request.getQueryLimits();

        this.fetchSize = limits.fetchSize();
        this.fetchSize = limits.getFetchSize();

        // The time budget is halved because this is the point when we start to
        // wrap up the search and return the results.
        this.budget = new IndexSearchBudget(limits.timeoutMs() / 2);
        this.budget = new IndexSearchBudget(limits.getTimeoutMs() / 2);
        this.query = IndexProtobufCodec.convertRpcQuery(request.getQuery());

        this.limitByDomain = limits.resultsByDomain();
        this.limitTotal = limits.resultsTotal();
        this.limitByDomain = limits.getResultsByDomain();
        this.limitTotal = limits.getResultsTotal();

        queryParams = new QueryParams(
                convertSpecLimit(request.getQuality()),

@@ -85,7 +86,7 @@ public class SearchParameters {
        compiledQuery = CompiledQueryParser.parse(this.query.compiledQuery);
        compiledQueryIds = compiledQuery.mapToLong(SearchTermsUtil::getWordId);

        rankingParams = IndexProtobufCodec.convertRankingParameterss(request.getParameters());
        rankingParams = request.hasParameters() ? request.getParameters() : PrototypeRankingParameters.sensibleDefaults();
    }
@@ -2,7 +2,6 @@ package nu.marginalia.index.results;

import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
import nu.marginalia.api.searchquery.model.compiled.CqExpression;
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;

import java.util.BitSet;

@@ -24,14 +23,14 @@ public class Bm25GraphVisitor implements CqExpression.DoubleVisitor {

    private final BitSet mask;

    public Bm25GraphVisitor(Bm25Parameters bm25Parameters,
    public Bm25GraphVisitor(double k1, double b,
                            float[] counts,
                            int length,
                            ResultRankingContext ctx) {
        this.length = length;

        this.k1 = bm25Parameters.k();
        this.b = bm25Parameters.b();
        this.k1 = k1;
        this.b = b;

        this.docCount = ctx.termFreqDocCount();
        this.counts = counts;
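For orientation on what the k1 and b parameters being threaded through here control, this is the textbook BM25 term score (a sketch only; the visitor above additionally applies count weighting and a term mask, so this is not its exact formula):

    // Textbook BM25: k1 caps term-frequency saturation, b controls length normalization.
    static double bm25(double k1, double b, double tf,
                       double docLen, double avgDocLen,
                       int docCount, int docFreq) {
        double idf = Math.log(1 + (docCount - docFreq + 0.5) / (docFreq + 0.5));
        double tfNorm = tf * (k1 + 1) / (tf + k1 * (1 - b + b * docLen / avgDocLen));
        return idf * tfNorm;
    }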
@@ -156,7 +156,7 @@ public class IndexResultRankingService {
        // for the selected results, as this would be comically expensive to do for all the results we
        // discard along the way

        if (params.rankingParams.exportDebugData) {
        if (params.rankingParams.getExportDebugData()) {
            var combinedIdsList = new LongArrayList(resultsList.size());
            for (var item : resultsList) {
                combinedIdsList.add(item.combinedId);
@@ -2,10 +2,11 @@ package nu.marginalia.index.results;

import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntList;
import nu.marginalia.api.searchquery.RpcResultRankingParameters;
import nu.marginalia.api.searchquery.RpcTemporalBias;
import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.results.SearchResultItem;
import nu.marginalia.api.searchquery.model.results.debug.DebugRankingFactors;
import nu.marginalia.index.forward.spans.DocumentSpans;

@@ -116,14 +117,14 @@ public class IndexResultScoreCalculator {

        float proximitiyFac = getProximitiyFac(decodedPositions, searchTerms.phraseConstraints, verbatimMatches, unorderedMatches, spans);

        double score_firstPosition = params.tcfFirstPosition * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
        double score_verbatim = params.tcfVerbatim * verbatimMatches.getScore();
        double score_proximity = params.tcfProximity * proximitiyFac;
        double score_bM25 = params.bm25Weight
                * wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.bm25Params, unorderedMatches.getWeightedCounts(), docSize, rankingContext))
        double score_firstPosition = params.getTcfFirstPositionWeight() * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
        double score_verbatim = params.getTcfVerbatimWeight() * verbatimMatches.getScore();
        double score_proximity = params.getTcfProximityWeight() * proximitiyFac;
        double score_bM25 = params.getBm25Weight()
                * wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.getBm25K(), params.getBm25B(), unorderedMatches.getWeightedCounts(), docSize, rankingContext))
                / (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));
        double score_bFlags = params.bm25Weight
                * wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.bm25Params, wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
        double score_bFlags = params.getBm25Weight()
                * wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.getBm25K(), wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
                / (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));

        double score = normalize(

@@ -245,9 +246,13 @@ public class IndexResultScoreCalculator {
    private double calculateDocumentBonus(long documentMetadata,
                                          int features,
                                          int length,
                                          ResultRankingParameters rankingParams,
                                          RpcResultRankingParameters rankingParams,
                                          @Nullable DebugRankingFactors debugRankingFactors) {

        if (rankingParams.getDisablePenalties()) {
            return 0.;
        }

        int rank = DocumentMetadata.decodeRank(documentMetadata);
        int asl = DocumentMetadata.decodeAvgSentenceLength(documentMetadata);
        int quality = DocumentMetadata.decodeQuality(documentMetadata);

@@ -256,18 +261,18 @@ public class IndexResultScoreCalculator {
        int topology = DocumentMetadata.decodeTopology(documentMetadata);
        int year = DocumentMetadata.decodeYear(documentMetadata);

        double averageSentenceLengthPenalty = (asl >= rankingParams.shortSentenceThreshold ? 0 : -rankingParams.shortSentencePenalty);
        double averageSentenceLengthPenalty = (asl >= rankingParams.getShortSentenceThreshold() ? 0 : -rankingParams.getShortSentencePenalty());

        final double qualityPenalty = calculateQualityPenalty(size, quality, rankingParams);
        final double rankingBonus = (255. - rank) * rankingParams.domainRankBonus;
        final double rankingBonus = (255. - rank) * rankingParams.getDomainRankBonus();
        final double topologyBonus = Math.log(1 + topology);
        final double documentLengthPenalty = length > rankingParams.shortDocumentThreshold ? 0 : -rankingParams.shortDocumentPenalty;
        final double documentLengthPenalty = length > rankingParams.getShortDocumentThreshold() ? 0 : -rankingParams.getShortDocumentPenalty();
        final double temporalBias;

        if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.RECENT) {
            temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.temporalBiasWeight;
        } else if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.OLD) {
            temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.temporalBiasWeight;
        if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.RECENT) {
            temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.getTemporalBiasWeight();
        } else if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.OLD) {
            temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.getTemporalBiasWeight();
        } else {
            temporalBias = 0;
        }

@@ -506,14 +511,14 @@ public class IndexResultScoreCalculator {
    }

    private double calculateQualityPenalty(int size, int quality, ResultRankingParameters rankingParams) {
    private double calculateQualityPenalty(int size, int quality, RpcResultRankingParameters rankingParams) {
        if (size < 400) {
            if (quality < 5)
                return 0;
            return -quality * rankingParams.qualityPenalty;
            return -quality * rankingParams.getQualityPenalty();
        }
        else {
            return -quality * rankingParams.qualityPenalty * 20;
            return -quality * rankingParams.getQualityPenalty() * 20;
        }
    }
@@ -3,7 +3,6 @@ package nu.marginalia.index.results;

import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
import nu.marginalia.api.searchquery.model.compiled.CqDataLong;
import nu.marginalia.api.searchquery.model.compiled.CqExpression;
import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
import nu.marginalia.model.idx.WordFlags;

@@ -15,15 +14,14 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
    private final CqDataLong wordMetaData;
    private final CqDataInt frequencies;
    private final float[] counts;
    private final Bm25Parameters bm25Parameters;

    private final double k1;
    private final int docCount;

    public TermFlagsGraphVisitor(Bm25Parameters bm25Parameters,
    public TermFlagsGraphVisitor(double k1,
                                 CqDataLong wordMetaData,
                                 float[] counts,
                                 ResultRankingContext ctx) {
        this.bm25Parameters = bm25Parameters;
        this.k1 = k1;
        this.counts = counts;
        this.docCount = ctx.termFreqDocCount();
        this.wordMetaData = wordMetaData;

@@ -55,7 +53,7 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
        int freq = frequencies.get(idx);

        // note we override b to zero for priority terms as they are independent of document length
        return invFreq(docCount, freq) * f(bm25Parameters.k(), 0, count, 0);
        return invFreq(docCount, freq) * f(k1, 0, count, 0);
    }

    private double evaluatePriorityScore(int idx) {
@@ -1,7 +0,0 @@
package nu.marginalia.index.query.limit;

public record QueryLimits(int resultsByDomain, int resultsTotal, int timeoutMs, int fetchSize) {
    public QueryLimits forSingleDomain() {
        return new QueryLimits(resultsTotal, resultsTotal, timeoutMs, fetchSize);
    }
}
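With this record deleted, callers that relied on forSingleDomain() can get the same effect from the protobuf builder (a sketch; it assumes only the generated RpcQueryLimits accessors that appear elsewhere in this diff):

    // Same semantics as the removed convenience method: widen the per-domain
    // cap to the total result cap, keep everything else unchanged.
    static RpcQueryLimits forSingleDomain(RpcQueryLimits limits) {
        return limits.toBuilder()
                .setResultsByDomain(limits.getResultsTotal())
                .build();
    }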
@@ -4,10 +4,11 @@ import com.google.inject.Guice;
import com.google.inject.Inject;
import nu.marginalia.IndexLocations;
import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
import nu.marginalia.api.searchquery.model.query.SearchQuery;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.index.construction.DocIdRewriter;
import nu.marginalia.index.construction.full.FullIndexConstructor;
import nu.marginalia.index.construction.prio.PrioIndexConstructor;

@@ -17,7 +18,6 @@ import nu.marginalia.index.forward.construction.ForwardIndexConverter;
import nu.marginalia.index.index.StatefulIndex;
import nu.marginalia.index.journal.IndexJournal;
import nu.marginalia.index.journal.IndexJournalSlopWriter;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.query.limit.SpecificationLimit;
import nu.marginalia.linkdb.docs.DocumentDbReader;

@@ -115,9 +115,16 @@ public class IndexQueryServiceIntegrationSmokeTest {

        var rsp = queryService.justQuery(
                SearchSpecification.builder()
                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
                        .queryLimits(
                                RpcQueryLimits.newBuilder()
                                        .setResultsByDomain(10)
                                        .setResultsTotal(10)
                                        .setTimeoutMs(Integer.MAX_VALUE)
                                        .setFetchSize(4000)
                                        .build()
                        )
                        .queryStrategy(QueryStrategy.SENTENCE)
                        .rankingParams(ResultRankingParameters.sensibleDefaults())
                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                        .domains(new ArrayList<>())
                        .searchSetIdentifier("NONE")
                        .query(

@@ -171,9 +178,16 @@ public class IndexQueryServiceIntegrationSmokeTest {

        var rsp = queryService.justQuery(
                SearchSpecification.builder()
                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
                        .queryLimits(
                                RpcQueryLimits.newBuilder()
                                        .setResultsByDomain(10)
                                        .setResultsTotal(10)
                                        .setTimeoutMs(Integer.MAX_VALUE)
                                        .setFetchSize(4000)
                                        .build()
                        )
                        .queryStrategy(QueryStrategy.SENTENCE)
                        .rankingParams(ResultRankingParameters.sensibleDefaults())
                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                        .domains(new ArrayList<>())
                        .searchSetIdentifier("NONE")
                        .query(

@@ -225,8 +239,15 @@ public class IndexQueryServiceIntegrationSmokeTest {

        var rsp = queryService.justQuery(
                SearchSpecification.builder()
                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
                        .rankingParams(ResultRankingParameters.sensibleDefaults())
                        .queryLimits(
                                RpcQueryLimits.newBuilder()
                                        .setResultsByDomain(10)
                                        .setResultsTotal(10)
                                        .setTimeoutMs(Integer.MAX_VALUE)
                                        .setFetchSize(4000)
                                        .build()
                        )
                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                        .queryStrategy(QueryStrategy.SENTENCE)
                        .domains(List.of(2))
                        .query(

@@ -282,11 +303,18 @@ public class IndexQueryServiceIntegrationSmokeTest {

        var rsp = queryService.justQuery(
                SearchSpecification.builder()
                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
                        .queryLimits(
                                RpcQueryLimits.newBuilder()
                                        .setResultsByDomain(10)
                                        .setResultsTotal(10)
                                        .setTimeoutMs(Integer.MAX_VALUE)
                                        .setFetchSize(4000)
                                        .build()
                        )
                        .year(SpecificationLimit.equals(1998))
                        .queryStrategy(QueryStrategy.SENTENCE)
                        .searchSetIdentifier("NONE")
                        .rankingParams(ResultRankingParameters.sensibleDefaults())
                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                        .query(
                                SearchQuery.builder()
                                        .compiledQuery("4")
@@ -4,10 +4,11 @@ import com.google.inject.Guice;
import com.google.inject.Inject;
import it.unimi.dsi.fastutil.ints.IntList;
import nu.marginalia.IndexLocations;
import nu.marginalia.api.searchquery.RpcQueryLimits;
import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
import nu.marginalia.api.searchquery.model.query.SearchQuery;
import nu.marginalia.api.searchquery.model.query.SearchSpecification;
import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
import nu.marginalia.hash.MurmurHash3_128;
import nu.marginalia.index.construction.DocIdRewriter;
import nu.marginalia.index.construction.full.FullIndexConstructor;

@@ -18,7 +19,6 @@ import nu.marginalia.index.forward.construction.ForwardIndexConverter;
import nu.marginalia.index.index.StatefulIndex;
import nu.marginalia.index.journal.IndexJournal;
import nu.marginalia.index.journal.IndexJournalSlopWriter;
import nu.marginalia.index.query.limit.QueryLimits;
import nu.marginalia.index.query.limit.QueryStrategy;
import nu.marginalia.index.query.limit.SpecificationLimit;
import nu.marginalia.linkdb.docs.DocumentDbReader;

@@ -389,13 +389,20 @@ public class IndexQueryServiceIntegrationTest {
    SearchSpecification basicQuery(Function<SearchSpecification.SearchSpecificationBuilder, SearchSpecification.SearchSpecificationBuilder> mutator)
    {
        var builder = SearchSpecification.builder()
                .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
                .queryLimits(
                        RpcQueryLimits.newBuilder()
                                .setResultsByDomain(10)
                                .setResultsTotal(10)
                                .setTimeoutMs(Integer.MAX_VALUE)
                                .setFetchSize(4000)
                                .build()
                )
                .queryStrategy(QueryStrategy.SENTENCE)
                .year(SpecificationLimit.none())
                .quality(SpecificationLimit.none())
                .size(SpecificationLimit.none())
                .rank(SpecificationLimit.none())
                .rankingParams(ResultRankingParameters.sensibleDefaults())
                .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                .domains(new ArrayList<>())
                .searchSetIdentifier("NONE");
@@ -27,7 +27,7 @@ public class SentenceSegmentSplitter {
        else {
            // If we flatten unicode, we do this...
            // FIXME: This can almost definitely be cleaned up and simplified.
            wordBreakPattern = Pattern.compile("([^/_#@.a-zA-Z'+\\-0-9\\u00C0-\\u00D6\\u00D8-\\u00f6\\u00f8-\\u00ff]+)|[|]|(\\.(\\s+|$))");
            wordBreakPattern = Pattern.compile("([^/<>$:_#@.a-zA-Z'+\\-0-9\\u00C0-\\u00D6\\u00D8-\\u00f6\\u00f8-\\u00ff]+)|[|]|(\\.(\\s+|$))");
        }
    }

@@ -90,12 +90,17 @@ public class SentenceSegmentSplitter {
        for (int i = 0; i < ret.size(); i++) {
            String part = ret.get(i);

            if (part.startsWith("<") && part.endsWith(">") && part.length() > 2) {
                ret.set(i, part.substring(1, part.length() - 1));
            }

            if (part.startsWith("'") && part.length() > 1) {
                ret.set(i, part.substring(1));
            }
            if (part.endsWith("'") && part.length() > 1) {
                ret.set(i, part.substring(0, part.length()-1));
            }

            while (part.endsWith(".")) {
                part = part.substring(0, part.length()-1);
                ret.set(i, part);
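The character-class change above means '<', '>', '$' and ':' no longer count as word breaks, which is what lets identifiers like std::vector and $_GET survive segmentation. A self-contained sketch of the effect (illustrative demo class, not part of this changeset):

    import java.util.regex.Pattern;

    public class WordBreakDemo {
        public static void main(String[] args) {
            // The revised pattern from the diff above.
            Pattern wordBreakPattern = Pattern.compile(
                    "([^/<>$:_#@.a-zA-Z'+\\-0-9\\u00C0-\\u00D6\\u00D8-\\u00f6\\u00f8-\\u00ff]+)|[|]|(\\.(\\s+|$))");

            // Prints: call / std::vector::push_back / or / $_GET / here
            for (String s : wordBreakPattern.split("call std::vector::push_back or $_GET here")) {
                System.out.println(s);
            }
        }
    }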
@@ -28,6 +28,20 @@ class SentenceExtractorTest {
        System.out.println(dld);
    }

    @Test
    void testCplusplus() {
        var dld = sentenceExtractor.extractSentence("std::vector", EnumSet.noneOf(HtmlTag.class));
        assertEquals(1, dld.length());
        assertEquals("std::vector", dld.wordsLowerCase[0]);
    }

    @Test
    void testPHP() {
        var dld = sentenceExtractor.extractSentence("$_GET", EnumSet.noneOf(HtmlTag.class));
        assertEquals(1, dld.length());
        assertEquals("$_get", dld.wordsLowerCase[0]);
    }

    @Test
    void testPolishArtist() {
        var dld = sentenceExtractor.extractSentence("Uklański", EnumSet.noneOf(HtmlTag.class));
@@ -152,7 +152,10 @@ public class DocumentPositionMapper {
    }

    boolean matchesWordPattern(String s) {
        // this function is an unrolled version of the regexp [\da-zA-Z]{1,15}([.\-_/:+*][\da-zA-Z]{1,10}){0,4}
        if (s.length() > 48)
            return false;

        // this function is an unrolled version of the regexp [\da-zA-Z]{1,15}([.\-_/:+*][\da-zA-Z]{1,10}){0,8}

        String wordPartSeparator = ".-_/:+*";

@@ -169,7 +172,7 @@ public class DocumentPositionMapper {
        if (i == 0)
            return false;

        for (int j = 0; j < 5; j++) {
        for (int j = 0; j < 8; j++) {
            if (i == s.length()) return true;

            if (wordPartSeparator.indexOf(s.charAt(i)) < 0) {
@@ -30,9 +30,11 @@ class DocumentPositionMapperTest {
        Assertions.assertFalse(positionMapper.matchesWordPattern("1234567890abcdef"));

        Assertions.assertTrue(positionMapper.matchesWordPattern("test-test-test-test-test"));
        Assertions.assertFalse(positionMapper.matchesWordPattern("test-test-test-test-test-test"));
        Assertions.assertFalse(positionMapper.matchesWordPattern("test-test-test-test-test-test-test-test-test"));
        Assertions.assertTrue(positionMapper.matchesWordPattern("192.168.1.100/24"));
        Assertions.assertTrue(positionMapper.matchesWordPattern("std::vector"));
        Assertions.assertTrue(positionMapper.matchesWordPattern("std::vector::push_back"));

        Assertions.assertTrue(positionMapper.matchesWordPattern("c++"));
        Assertions.assertTrue(positionMapper.matchesWordPattern("m*a*s*h"));
        Assertions.assertFalse(positionMapper.matchesWordPattern("Stulpnagelstrasse"));
@@ -0,0 +1,113 @@
package nu.marginalia.converting.processor.plugin.specialization;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import nu.marginalia.converting.processor.logic.TitleExtractor;
import nu.marginalia.converting.processor.summary.SummaryExtractor;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.util.Strings;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

@Singleton
public class CppreferenceSpecialization extends WikiSpecialization {

    @Inject
    public CppreferenceSpecialization(SummaryExtractor summaryExtractor, TitleExtractor titleExtractor) {
        super(summaryExtractor, titleExtractor);
    }

    @Override
    public Document prune(Document original) {
        var doc = original.clone();

        doc.getElementsByClass("t-nv").remove();
        doc.getElementsByClass("toc").remove();
        doc.getElementsByClass("mw-head").remove();
        doc.getElementsByClass("printfooter").remove();
        doc.getElementsByClass("cpp-footer-base").remove();

        doc.title(doc.title() + " " + Strings.join(extractExtraTokens(doc.title()), ' '));

        return doc;
    }

    @Override
    public String getSummary(Document doc, Set<String> importantWords) {

        Element declTable = doc.getElementsByClass("t-dcl-begin").first();
        if (declTable != null) {
            var nextPar = declTable.nextElementSibling();
            if (nextPar != null) {
                return nextPar.text();
            }
        }

        return super.getSummary(doc, importantWords);
    }

    public List<String> extractExtraTokens(String title) {

        if (!title.contains("::")) {
            return List.of();
        }
        if (!title.contains("-")) {
            return List.of();
        }

        title = StringUtils.split(title, '-')[0];

        String name = title;
        for (;;) {
            int lbidx = name.indexOf('<');
            int rbidx = name.indexOf('>');

            if (lbidx > 0 && rbidx > lbidx) {
                String className = name.substring(0, lbidx);
                String methodName = name.substring(rbidx + 1);
                name = className + methodName;
            } else {
                break;
            }
        }

        List<String> tokens = new ArrayList<>();

        for (var part : name.split("\\s*,\\s*")) {
            if (part.endsWith(")") && !part.endsWith("()")) {
                int parenStart = part.indexOf('(');
                if (parenStart > 0) { // foo(...) -> foo
                    part = part.substring(0, parenStart);
                }
                else if (parenStart == 0) { // (foo) -> foo
                    part = part.substring(1, part.length() - 1);
                }
            }

            part = part.trim();
            if (part.contains("::")) {
                tokens.add(part);
                if (part.startsWith("std::")) {
                    tokens.add(part.substring(5));

                    int ss = part.indexOf("::", 5);
                    if (ss > 0) {
                        tokens.add(part.substring(0, ss));
                        tokens.add(part.substring(ss+2));
                    }
                }
            }
        }

        return tokens;
    }

}
@@ -24,6 +24,7 @@ public class HtmlProcessorSpecializations {
    private final WikiSpecialization wikiSpecialization;
    private final BlogSpecialization blogSpecialization;
    private final GogStoreSpecialization gogStoreSpecialization;
    private final CppreferenceSpecialization cppreferenceSpecialization;
    private final DefaultSpecialization defaultSpecialization;

    @Inject

@@ -37,6 +38,7 @@ public class HtmlProcessorSpecializations {
                                       WikiSpecialization wikiSpecialization,
                                       BlogSpecialization blogSpecialization,
                                       GogStoreSpecialization gogStoreSpecialization,
                                       CppreferenceSpecialization cppreferenceSpecialization,
                                       DefaultSpecialization defaultSpecialization) {
        this.domainTypes = domainTypes;
        this.lemmySpecialization = lemmySpecialization;

@@ -48,6 +50,7 @@ public class HtmlProcessorSpecializations {
        this.wikiSpecialization = wikiSpecialization;
        this.blogSpecialization = blogSpecialization;
        this.gogStoreSpecialization = gogStoreSpecialization;
        this.cppreferenceSpecialization = cppreferenceSpecialization;
        this.defaultSpecialization = defaultSpecialization;
    }

@@ -66,6 +69,10 @@ public class HtmlProcessorSpecializations {
            return mariadbKbSpecialization;
        }

        if (url.domain.getTopDomain().equals("cppreference.com")) {
            return cppreferenceSpecialization;
        }

        if (url.domain.toString().equals("store.steampowered.com")) {
            return steamStoreSpecialization;
        }

@@ -86,6 +93,9 @@ public class HtmlProcessorSpecializations {
        if (generator.keywords().contains("javadoc")) {
            return javadocSpecialization;
        }

        // Must be toward the end, as some specializations are for
        // wiki-generator content
        if (generator.type() == GeneratorType.WIKI) {
            return wikiSpecialization;
        }

@@ -105,7 +115,7 @@ public class HtmlProcessorSpecializations {

        boolean shouldIndex(EdgeUrl url);
        double lengthModifier();
        void amendWords(Document doc, DocumentKeywordsBuilder words);

        default void amendWords(Document doc, DocumentKeywordsBuilder words) {}
    }
}
@@ -4,7 +4,6 @@ import com.google.inject.Inject;
import com.google.inject.Singleton;
import nu.marginalia.converting.processor.logic.TitleExtractor;
import nu.marginalia.converting.processor.summary.SummaryExtractor;
import nu.marginalia.keyword.model.DocumentKeywordsBuilder;
import nu.marginalia.model.EdgeUrl;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;

@@ -93,6 +92,8 @@ public class WikiSpecialization extends DefaultSpecialization {
        return true;
    }

    public void amendWords(Document doc, DocumentKeywordsBuilder words) {
    @Override
    public double lengthModifier() {
        return 2.5;
    }
}
@@ -106,11 +106,7 @@ public class WarcSideloader implements SideloadSource, AutoCloseable {
                return false;

            var url = new EdgeUrl(warcResponse.target());
            if (!Objects.equals(url.getDomain(), domain)) {
                return false;
            }

            return true;
            return Objects.equals(url.getDomain(), domain);
        } catch (Exception e) {
            logger.warn("Failed to process response", e);
        }
@@ -8,6 +8,7 @@ import nu.marginalia.converting.model.ProcessedDomain;
import nu.marginalia.converting.processor.DomainProcessor;
import nu.marginalia.crawl.CrawlerMain;
import nu.marginalia.crawl.DomainStateDb;
import nu.marginalia.crawl.fetcher.Cookies;
import nu.marginalia.crawl.fetcher.HttpFetcher;
import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;

@@ -200,23 +201,23 @@ public class CrawlingThenConvertingIntegrationTest {

    @Test
    public void crawlRobotsTxt() throws Exception {
        var specs = new CrawlerMain.CrawlSpecRecord("search.marginalia.nu", 5,
                List.of("https://search.marginalia.nu/search?q=hello+world")
        var specs = new CrawlerMain.CrawlSpecRecord("marginalia-search.com", 5,
                List.of("https://marginalia-search.com/search?q=hello+world")
        );

        CrawledDomain domain = crawl(specs);
        assertFalse(domain.doc.isEmpty());
        assertEquals("OK", domain.crawlerStatus);
        assertEquals("search.marginalia.nu", domain.domain);
        assertEquals("marginalia-search.com", domain.domain);

        Set<String> allUrls = domain.doc.stream().map(doc -> doc.url).collect(Collectors.toSet());
        assertTrue(allUrls.contains("https://search.marginalia.nu/search"), "We expect a record for entities that are forbidden");
        assertTrue(allUrls.contains("https://marginalia-search.com/search"), "We expect a record for entities that are forbidden");

        var output = process();

        assertNotNull(output);
        assertFalse(output.documents.isEmpty());
        assertEquals(new EdgeDomain("search.marginalia.nu"), output.domain);
        assertEquals(new EdgeDomain("marginalia-search.com"), output.domain);
        assertEquals(DomainIndexingState.ACTIVE, output.state);

        for (var doc : output.documents) {

@@ -246,7 +247,7 @@ public class CrawlingThenConvertingIntegrationTest {
    private CrawledDomain crawl(CrawlerMain.CrawlSpecRecord specs, Predicate<EdgeDomain> domainBlacklist) throws Exception {
        List<SerializableCrawlData> data = new ArrayList<>();

        try (var recorder = new WarcRecorder(fileName);
        try (var recorder = new WarcRecorder(fileName, new Cookies());
             var db = new DomainStateDb(dbTempFile))
        {
            new CrawlerRetreiver(httpFetcher, new DomainProber(domainBlacklist), specs, db, recorder).crawlDomain();
@@ -0,0 +1,27 @@
package nu.marginalia.converting.processor.plugin.specialization;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.List;

class CppreferenceSpecializationTest {
    CppreferenceSpecialization specialization = new CppreferenceSpecialization(null, null);

    @Test
    public void testTitleMagic() {

        List<String> ret;

        ret = specialization.extractExtraTokens("std::multimap<Key, T, Compare, Allocator>::crend - cppreference.com");
        Assertions.assertTrue(ret.contains("std::multimap::crend"));
        Assertions.assertTrue(ret.contains("multimap::crend"));
        Assertions.assertTrue(ret.contains("std::multimap"));
        Assertions.assertTrue(ret.contains("crend"));

        ret = specialization.extractExtraTokens("std::coroutine_handle<Promise>::operator(), std::coroutine_handle<Promise>::resume - cppreference.com");
        Assertions.assertTrue(ret.contains("std::coroutine_handle::operator()"));
        Assertions.assertTrue(ret.contains("std::coroutine_handle::resume"));
    }

}
@@ -55,7 +55,6 @@ dependencies {
    implementation libs.zstd
    implementation libs.jwarc
    implementation libs.crawlercommons
    implementation libs.okhttp3
    implementation libs.jsoup
    implementation libs.opencsv
    implementation libs.fastutil
@@ -33,8 +33,6 @@ import nu.marginalia.service.module.DatabaseModule;
import nu.marginalia.storage.FileStorageService;
import nu.marginalia.storage.model.FileStorageId;
import nu.marginalia.util.SimpleBlockingThreadPool;
import okhttp3.ConnectionPool;
import okhttp3.Dispatcher;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -85,6 +83,7 @@ public class CrawlerMain extends ProcessMainClass {

    @Inject
    public CrawlerMain(UserAgent userAgent,
                       HttpFetcherImpl httpFetcher,
                       ProcessHeartbeatImpl heartbeat,
                       MessageQueueFactory messageQueueFactory, DomainProber domainProber,
                       FileStorageService fileStorageService,

@@ -98,6 +97,7 @@ public class CrawlerMain extends ProcessMainClass {
        super(messageQueueFactory, processConfiguration, gson, CRAWLER_INBOX);

        this.userAgent = userAgent;
        this.fetcher = httpFetcher;
        this.heartbeat = heartbeat;
        this.domainProber = domainProber;
        this.fileStorageService = fileStorageService;

@@ -111,10 +111,6 @@ public class CrawlerMain extends ProcessMainClass {
                Integer.getInteger("crawler.poolSize", 256),
                1);

        fetcher = new HttpFetcherImpl(userAgent,
                new Dispatcher(),
                new ConnectionPool(5, 10, TimeUnit.SECONDS)
        );

        // Wait for the blacklist to be loaded before starting the crawl
        blacklist.waitUntilLoaded();

@@ -132,6 +128,10 @@ public class CrawlerMain extends ProcessMainClass {
        System.setProperty("sun.net.client.defaultConnectTimeout", "30000");
        System.setProperty("sun.net.client.defaultReadTimeout", "30000");

        // Set the maximum number of connections to keep alive in the connection pool
        System.setProperty("jdk.httpclient.idleTimeout", "15"); // 15 seconds
        System.setProperty("jdk.httpclient.connectionPoolSize", "256");

        // We don't want to use too much memory caching sessions for https
        System.setProperty("javax.net.ssl.sessionCacheSize", "2048");

@@ -364,10 +364,10 @@ public class CrawlerMain extends ProcessMainClass {
            Files.deleteIfExists(tempFile);
        }

        try (var warcRecorder = new WarcRecorder(newWarcFile); // write to a temp file for now
        try (var warcRecorder = new WarcRecorder(newWarcFile, fetcher); // write to a temp file for now
             var retriever = new CrawlerRetreiver(fetcher, domainProber, specification, domainStateDb, warcRecorder);
             CrawlDataReference reference = getReference();
        )
             CrawlDataReference reference = getReference()
        )
        {
            // Resume the crawl if it was aborted
            if (Files.exists(tempFile)) {
@@ -1,6 +1,6 @@
package nu.marginalia.crawl.fetcher;

import okhttp3.Request;
import java.net.http.HttpRequest;

/** Encapsulates request modifiers; the ETag and Last-Modified tags for a resource */
public record ContentTags(String etag, String lastMod) {

@@ -17,37 +17,14 @@ public record ContentTags(String etag, String lastMod) {
    }

    /** Paints the tags onto the request builder. */
    public void paint(Request.Builder getBuilder) {
    public void paint(HttpRequest.Builder getBuilder) {

        if (etag != null) {
            getBuilder.addHeader("If-None-Match", ifNoneMatch());
            getBuilder.header("If-None-Match", etag);
        }

        if (lastMod != null) {
            getBuilder.addHeader("If-Modified-Since", ifModifiedSince());
            getBuilder.header("If-Modified-Since", lastMod);
        }
    }

    private String ifNoneMatch() {
        // Remove the W/ prefix if it exists

        //'W/' (case-sensitive) indicates that a weak validator is used. Weak etags are
        // easy to generate, but are far less useful for comparisons. Strong validators
        // are ideal for comparisons but can be very difficult to generate efficiently.
        // Weak ETag values of two representations of the same resources might be semantically
        // equivalent, but not byte-for-byte identical. This means weak etags prevent caching
        // when byte range requests are used, but strong etags mean range requests can
        // still be cached.
        // - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag

        if (null != etag && etag.startsWith("W/")) {
            return etag.substring(2);
        } else {
            return etag;
        }
    }

    private String ifModifiedSince() {
        return lastMod;
    }
}
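Note that the new paint() sends the stored ETag verbatim, including any W/ weak-validator prefix that the removed helper used to strip. For reference, a self-contained sketch of the conditional request these headers produce (the URL and validator values are placeholders):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ConditionalGetDemo {
        public static void main(String[] args) throws Exception {
            var client = HttpClient.newHttpClient();

            // Replay validators captured from an earlier response; a 304 reply
            // means the cached body is still fresh and no new body is sent.
            var req = HttpRequest.newBuilder(URI.create("https://example.com/"))
                    .header("If-None-Match", "\"abc123\"")
                    .header("If-Modified-Since", "Sat, 01 Jan 2022 00:00:00 GMT")
                    .GET()
                    .build();

            var rsp = client.send(req, HttpResponse.BodyHandlers.ofString());
            System.out.println(rsp.statusCode()); // 200 (changed) or 304 (not modified)
        }
    }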
@@ -1,33 +1,14 @@
package nu.marginalia.crawl.fetcher;

import okhttp3.Cookie;
import okhttp3.CookieJar;
import okhttp3.HttpUrl;

import java.util.Collections;
import java.io.IOException;
import java.net.CookieHandler;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class Cookies {
    final ThreadLocal<ConcurrentHashMap<String, List<Cookie>>> cookieJar = ThreadLocal.withInitial(ConcurrentHashMap::new);

    public CookieJar getJar() {
        return new CookieJar() {

            @Override
            public void saveFromResponse(HttpUrl url, List<Cookie> cookies) {

                if (!cookies.isEmpty()) {
                    cookieJar.get().put(url.host(), cookies);
                }
            }

            @Override
            public List<Cookie> loadForRequest(HttpUrl url) {
                return cookieJar.get().getOrDefault(url.host(), Collections.emptyList());
            }
        };
    }
public class Cookies extends CookieHandler {
    final ThreadLocal<ConcurrentHashMap<String, List<String>>> cookieJar = ThreadLocal.withInitial(ConcurrentHashMap::new);

    public void clear() {
        cookieJar.get().clear();

@@ -38,6 +19,16 @@ public class Cookies {
    }

    public List<String> getCookies() {
        return cookieJar.get().values().stream().flatMap(List::stream).map(Cookie::toString).toList();
        return cookieJar.get().values().stream().flatMap(List::stream).toList();
    }

    @Override
    public Map<String, List<String>> get(URI uri, Map<String, List<String>> requestHeaders) throws IOException {
        return cookieJar.get();
    }

    @Override
    public void put(URI uri, Map<String, List<String>> responseHeaders) throws IOException {
        cookieJar.get().putAll(responseHeaders);
    }
}
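The mechanism being adopted here is the JDK's java.net.CookieHandler hook: once a handler is registered on an HttpClient, the client calls get() before and put() after every exchange. A minimal standalone sketch of that wiring (illustrative only; it stores nothing):

    import java.io.IOException;
    import java.net.CookieHandler;
    import java.net.URI;
    import java.net.http.HttpClient;
    import java.util.List;
    import java.util.Map;

    public class CookieHandlerDemo {
        public static void main(String[] args) {
            CookieHandler handler = new CookieHandler() {
                @Override
                public Map<String, List<String>> get(URI uri, Map<String, List<String>> requestHeaders) throws IOException {
                    return Map.of(); // no stored cookies in this sketch
                }
                @Override
                public void put(URI uri, Map<String, List<String>> responseHeaders) throws IOException {
                    responseHeaders.getOrDefault("set-cookie", List.of())
                            .forEach(c -> System.out.println("stored: " + c));
                }
            };

            HttpClient client = HttpClient.newBuilder()
                    .cookieHandler(handler)
                    .build();
            // requests sent through `client` now consult the handler automatically
        }
    }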
@@ -3,6 +3,7 @@ package nu.marginalia.crawl.fetcher;
import com.google.inject.ImplementedBy;
import crawlercommons.robots.SimpleRobotRules;
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
import nu.marginalia.crawl.retreival.CrawlDelayTimer;
import nu.marginalia.model.EdgeDomain;
import nu.marginalia.model.EdgeUrl;
import nu.marginalia.model.body.HttpFetchResult;

@@ -11,10 +12,10 @@ import nu.marginalia.model.crawldata.CrawlerDomainStatus;
import java.util.List;

@ImplementedBy(HttpFetcherImpl.class)
public interface HttpFetcher {
public interface HttpFetcher extends AutoCloseable {
    void setAllowAllContentTypes(boolean allowAllContentTypes);

    List<String> getCookies();
    Cookies getCookies();
    void clearCookies();

    DomainProbeResult probeDomain(EdgeUrl url);

@@ -27,7 +28,9 @@ public interface HttpFetcher {
    HttpFetchResult fetchContent(EdgeUrl url,
                                 WarcRecorder recorder,
                                 ContentTags tags,
                                 ProbeType probeType) throws HttpFetcherImpl.RateLimitException, Exception;
                                 ProbeType probeType) throws Exception;

    List<EdgeUrl> fetchSitemapUrls(String rootSitemapUrl, CrawlDelayTimer delayTimer);

    SimpleRobotRules fetchRobotRules(EdgeDomain domain, WarcRecorder recorder);
@@ -1,35 +1,41 @@
|
||||
package nu.marginalia.crawl.fetcher;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
import crawlercommons.robots.SimpleRobotRules;
|
||||
import crawlercommons.robots.SimpleRobotRulesParser;
|
||||
import nu.marginalia.UserAgent;
|
||||
import nu.marginalia.crawl.fetcher.socket.FastTerminatingSocketFactory;
|
||||
import nu.marginalia.crawl.fetcher.socket.IpInterceptingNetworkInterceptor;
|
||||
import nu.marginalia.crawl.fetcher.socket.NoSecuritySSL;
|
||||
import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
|
||||
import nu.marginalia.crawl.retreival.CrawlDelayTimer;
|
||||
import nu.marginalia.model.EdgeDomain;
|
||||
import nu.marginalia.model.EdgeUrl;
|
||||
import nu.marginalia.model.body.ContentTypeLogic;
|
||||
import nu.marginalia.model.body.DocumentBodyExtractor;
|
||||
import nu.marginalia.model.body.HttpFetchResult;
|
||||
import nu.marginalia.model.crawldata.CrawlerDomainStatus;
|
||||
import okhttp3.ConnectionPool;
|
||||
import okhttp3.Dispatcher;
|
||||
import okhttp3.OkHttpClient;
|
||||
import okhttp3.Request;
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.parser.Parser;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.net.ssl.X509TrustManager;
|
||||
import java.io.InterruptedIOException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URLDecoder;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.net.http.HttpTimeoutException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.time.Duration;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.zip.GZIPInputStream;
|
||||
|
||||
|
||||
@Singleton
|
||||
public class HttpFetcherImpl implements HttpFetcher {
|
||||
|
||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||
@@ -40,39 +46,28 @@ public class HttpFetcherImpl implements HttpFetcher {
|
||||
private static final SimpleRobotRulesParser robotsParser = new SimpleRobotRulesParser();
|
||||
private static final ContentTypeLogic contentTypeLogic = new ContentTypeLogic();
|
||||
|
||||
private final Duration requestTimeout = Duration.ofSeconds(10);
|
||||
|
||||
@Override
|
||||
public void setAllowAllContentTypes(boolean allowAllContentTypes) {
|
||||
contentTypeLogic.setAllowAllContentTypes(allowAllContentTypes);
|
||||
}
|
||||
|
||||
private final OkHttpClient client;
|
||||
|
||||
private static final FastTerminatingSocketFactory ftSocketFactory = new FastTerminatingSocketFactory();
|
||||
|
||||
private OkHttpClient createClient(Dispatcher dispatcher, ConnectionPool pool) {
|
||||
var builder = new OkHttpClient.Builder();
|
||||
if (dispatcher != null) {
|
||||
builder.dispatcher(dispatcher);
|
||||
}
|
||||
|
||||
return builder.sslSocketFactory(NoSecuritySSL.buildSocketFactory(), (X509TrustManager) NoSecuritySSL.trustAllCerts[0])
|
||||
.socketFactory(ftSocketFactory)
|
||||
.hostnameVerifier(NoSecuritySSL.buildHostnameVerifyer())
|
||||
.addNetworkInterceptor(new IpInterceptingNetworkInterceptor())
|
||||
.connectionPool(pool)
|
||||
.cookieJar(cookies.getJar())
|
||||
.followRedirects(true)
|
||||
.followSslRedirects(true)
|
||||
.connectTimeout(8, TimeUnit.SECONDS)
|
||||
.readTimeout(10, TimeUnit.SECONDS)
|
||||
.writeTimeout(10, TimeUnit.SECONDS)
|
||||
.build();
|
||||
private final HttpClient client;
|
||||
|
||||
private HttpClient createClient() {
|
||||
return HttpClient.newBuilder()
|
||||
.sslContext(NoSecuritySSL.buildSslContext())
|
||||
.cookieHandler(cookies)
|
||||
.followRedirects(HttpClient.Redirect.NORMAL)
|
||||
.connectTimeout(Duration.ofSeconds(8))
|
||||
.executor(Executors.newCachedThreadPool())
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getCookies() {
|
||||
return cookies.getCookies();
|
||||
public Cookies getCookies() {
|
||||
return cookies;
|
||||
}
|
||||
|
||||
     @Override
@@ -81,26 +76,24 @@ public class HttpFetcherImpl implements HttpFetcher {
     }

     @Inject
-    public HttpFetcherImpl(UserAgent userAgent,
-                           Dispatcher dispatcher,
-                           ConnectionPool connectionPool)
+    public HttpFetcherImpl(UserAgent userAgent)
     {
-        this.client = createClient(dispatcher, connectionPool);
+        this.client = createClient();
         this.userAgentString = userAgent.uaString();
         this.userAgentIdentifier = userAgent.uaIdentifier();
     }

     public HttpFetcherImpl(String userAgent) {
-        this.client = createClient(null, new ConnectionPool());
+        this.client = createClient();
         this.userAgentString = userAgent;
        this.userAgentIdentifier = userAgent;
     }

     // Not necessary in prod, but useful in test
     public void close() {
-        client.dispatcher().executorService().shutdown();
-        client.connectionPool().evictAll();
+        client.close();
     }

     /**
      * Probe the domain to see if it is reachable, attempting to identify which schema to use,
      * and if there are any redirects. This is done by one or more HEAD requests.
@@ -110,19 +103,26 @@ public class HttpFetcherImpl implements HttpFetcher {
      */
     @Override
     public DomainProbeResult probeDomain(EdgeUrl url) {
-        var head = new Request.Builder().head().addHeader("User-agent", userAgentString)
-                .url(url.toString())
-                .build();
+        HttpRequest head;
+        try {
+            head = HttpRequest.newBuilder()
+                    .HEAD()
+                    .uri(url.asURI())
+                    .header("User-agent", userAgentString)
+                    .timeout(requestTimeout)
+                    .build();
+        } catch (URISyntaxException e) {
+            return new DomainProbeResult.Error(CrawlerDomainStatus.ERROR, "Invalid URL");
+        }

-        var call = client.newCall(head);
+        try {
+            var rsp = client.send(head, HttpResponse.BodyHandlers.discarding());
+            EdgeUrl rspUri = new EdgeUrl(rsp.uri());

-        try (var rsp = call.execute()) {
-            EdgeUrl requestUrl = new EdgeUrl(rsp.request().url().toString());
-
-            if (!Objects.equals(requestUrl.domain, url.domain)) {
-                return new DomainProbeResult.Redirect(requestUrl.domain);
+            if (!Objects.equals(rspUri.domain, url.domain)) {
+                return new DomainProbeResult.Redirect(rspUri.domain);
             }
-            return new DomainProbeResult.Ok(requestUrl);
+            return new DomainProbeResult.Ok(rspUri);
         }
         catch (Exception ex) {
             return new DomainProbeResult.Error(CrawlerDomainStatus.ERROR, ex.getMessage());
@@ -140,21 +140,25 @@
                                                   WarcRecorder warcRecorder,
                                                   ContentTags tags) throws RateLimitException {
         if (tags.isEmpty() && contentTypeLogic.isUrlLikeBinary(url)) {
-            var headBuilder = new Request.Builder().head()
-                    .addHeader("User-agent", userAgentString)
-                    .addHeader("Accept-Encoding", "gzip")
-                    .url(url.toString());
-
-            var head = headBuilder.build();
-            var call = client.newCall(head);
+            try {
+                var headBuilder = HttpRequest.newBuilder()
+                        .HEAD()
+                        .uri(url.asURI())
+                        .header("User-agent", userAgentString)
+                        .header("Accept-Encoding", "gzip")
+                        .timeout(requestTimeout)
+                        ;

-            try (var rsp = call.execute()) {
-                var contentTypeHeader = rsp.header("Content-type");
+                var rsp = client.send(headBuilder.build(), HttpResponse.BodyHandlers.discarding());
+                var headers = rsp.headers();

+                var contentTypeHeader = headers.firstValue("Content-Type").orElse(null);

                 if (contentTypeHeader != null && !contentTypeLogic.isAllowableContentType(contentTypeHeader)) {
-                    warcRecorder.flagAsFailedContentTypeProbe(url, contentTypeHeader, rsp.code());
+                    warcRecorder.flagAsFailedContentTypeProbe(url, contentTypeHeader, rsp.statusCode());

-                    return new ContentTypeProbeResult.BadContentType(contentTypeHeader, rsp.code());
+                    return new ContentTypeProbeResult.BadContentType(contentTypeHeader, rsp.statusCode());
                 }

                 // Update the URL to the final URL of the HEAD request, otherwise we might end up doing
@@ -168,27 +172,27 @@ public class HttpFetcherImpl implements HttpFetcher {
                 // too many eyebrows when looking at the logs on the target server. Overall it's probably desirable
                 // that it looks like the traffic makes sense, as opposed to looking like a broken bot.

-                var redirectUrl = new EdgeUrl(rsp.request().url().toString());
+                var redirectUrl = new EdgeUrl(rsp.uri());
                 EdgeUrl ret;

                 if (Objects.equals(redirectUrl.domain, url.domain)) ret = redirectUrl;
                 else ret = url;

                 // Intercept rate limiting
-                if (rsp.code() == 429) {
-                    throw new HttpFetcherImpl.RateLimitException(Objects.requireNonNullElse(rsp.header("Retry-After"), "1"));
+                if (rsp.statusCode() == 429) {
+                    throw new HttpFetcherImpl.RateLimitException(headers.firstValue("Retry-After").orElse("1"));
                 }

                 return new ContentTypeProbeResult.Ok(ret);
             }
+            catch (HttpTimeoutException ex) {
+                warcRecorder.flagAsTimeout(url);
+                return new ContentTypeProbeResult.Timeout(ex);
+            }
+            catch (RateLimitException ex) {
+                throw ex;
+            }
-            catch (InterruptedIOException ex) {
-                warcRecorder.flagAsTimeout(url);
-
-                return new ContentTypeProbeResult.Timeout(ex);
-            } catch (Exception ex) {
+            catch (Exception ex) {
                 logger.error("Error during fetching {}[{}]", ex.getClass().getSimpleName(), ex.getMessage());

                 warcRecorder.flagAsError(url, ex);
@@ -210,13 +214,15 @@ public class HttpFetcherImpl implements HttpFetcher {
                                     ProbeType probeType)
             throws Exception
     {
-        var getBuilder = new Request.Builder().get();
-
-        getBuilder.url(url.toString())
-                .addHeader("Accept-Encoding", "gzip")
-                .addHeader("Accept-Language", "en,*;q=0.5")
-                .addHeader("Accept", "text/html, application/xhtml+xml, text/*;q=0.8")
-                .addHeader("User-agent", userAgentString);
+        var getBuilder = HttpRequest.newBuilder()
+                .GET()
+                .uri(url.asURI())
+                .header("User-agent", userAgentString)
+                .header("Accept-Encoding", "gzip")
+                .header("Accept-Language", "en,*;q=0.5")
+                .header("Accept", "text/html, application/xhtml+xml, text/*;q=0.8")
+                .timeout(requestTimeout)
+                ;

         contentTags.paint(getBuilder);

@@ -242,6 +248,126 @@
         return new SitemapRetriever();
     }

+    @Override
+    public List<EdgeUrl> fetchSitemapUrls(String root, CrawlDelayTimer delayTimer) {
+        try {
+            List<EdgeUrl> ret = new ArrayList<>();
+
+            Set<String> seenUrls = new HashSet<>();
+            Set<String> seenSitemaps = new HashSet<>();
+
+            Deque<EdgeUrl> sitemapQueue = new LinkedList<>();
+
+            EdgeUrl rootSitemapUrl = new EdgeUrl(root);
+
+            sitemapQueue.add(rootSitemapUrl);
+
+            int fetchedSitemaps = 0;
+
+            while (!sitemapQueue.isEmpty() && ret.size() < 20_000 && ++fetchedSitemaps < 10) {
+                var head = sitemapQueue.removeFirst();
+
+                switch (fetchSitemap(head)) {
+                    case SitemapResult.SitemapUrls(List<String> urls) -> {
+
+                        for (var url : urls) {
+                            if (seenUrls.add(url)) {
+                                EdgeUrl.parse(url)
+                                        .filter(u -> u.domain.equals(rootSitemapUrl.domain))
+                                        .ifPresent(ret::add);
+                            }
+                        }
+
+                    }
+                    case SitemapResult.SitemapReferences(List<String> refs) -> {
+                        for (var ref : refs) {
+                            if (seenSitemaps.add(ref)) {
+                                EdgeUrl.parse(ref)
+                                        .filter(url -> url.domain.equals(rootSitemapUrl.domain))
+                                        .ifPresent(sitemapQueue::addFirst);
+                            }
+                        }
+                    }
+                    case SitemapResult.SitemapError() -> {}
+                }
+
+                delayTimer.waitFetchDelay();
+            }
+
+            return ret;
+        }
+        catch (Exception ex) {
+            logger.error("Error while fetching sitemaps via {}: {} ({})", root, ex.getClass().getSimpleName(), ex.getMessage());
+            return List.of();
+        }
+    }
+
+
+    private SitemapResult fetchSitemap(EdgeUrl sitemapUrl) throws URISyntaxException, IOException, InterruptedException {
+        HttpRequest getRequest = HttpRequest.newBuilder()
+                .GET()
+                .uri(sitemapUrl.asURI())
+                .header("Accept-Encoding", "gzip")
+                .header("Accept", "text/*, */*;q=0.9")
+                .header("User-agent", userAgentString)
+                .timeout(requestTimeout)
+                .build();
+
+        var response = client.send(getRequest, HttpResponse.BodyHandlers.ofInputStream());
+        if (response.statusCode() != 200) {
+            return new SitemapResult.SitemapError();
+        }
+
+        try (InputStream inputStream = response.body()) {
+
+            InputStream parserStream;
+            if (sitemapUrl.path.endsWith(".gz")) {
+                parserStream = new GZIPInputStream(inputStream);
+            }
+            else {
+                parserStream = inputStream;
+            }
+
+            Document parsedSitemap = Jsoup.parse(parserStream, "UTF-8", sitemapUrl.toString(), Parser.xmlParser());
+            if (parsedSitemap.childrenSize() == 0) {
+                return new SitemapResult.SitemapError();
+            }
+
+            String rootTagName = parsedSitemap.child(0).tagName();
+
+            return switch (rootTagName.toLowerCase()) {
+                case "sitemapindex" -> {
+                    List<String> references = new ArrayList<>();
+                    for (var locTag : parsedSitemap.getElementsByTag("loc")) {
+                        references.add(URLDecoder.decode(locTag.text().trim(), StandardCharsets.UTF_8));
+                    }
+                    yield new SitemapResult.SitemapReferences(Collections.unmodifiableList(references));
+                }
+                case "urlset" -> {
+                    List<String> urls = new ArrayList<>();
+                    for (var locTag : parsedSitemap.select("url > loc")) {
+                        urls.add(URLDecoder.decode(locTag.text().trim(), StandardCharsets.UTF_8));
+                    }
+                    yield new SitemapResult.SitemapUrls(Collections.unmodifiableList(urls));
+                }
+                case "rss", "atom" -> {
+                    List<String> urls = new ArrayList<>();
+                    for (var locTag : parsedSitemap.select("link, url")) {
+                        urls.add(locTag.text().trim());
+                    }
+                    yield new SitemapResult.SitemapUrls(Collections.unmodifiableList(urls));
+                }
+                default -> new SitemapResult.SitemapError();
+            };
+        }
+    }
+
+    private sealed interface SitemapResult {
+        record SitemapUrls(List<String> urls) implements SitemapResult {}
+        record SitemapReferences(List<String> sitemapRefs) implements SitemapResult {}
+        record SitemapError() implements SitemapResult {}
+    }

     @Override
     public SimpleRobotRules fetchRobotRules(EdgeDomain domain, WarcRecorder recorder) {
         var ret = fetchAndParseRobotsTxt(new EdgeUrl("https", domain, null, "/robots.txt", null), recorder);
@@ -257,14 +383,15 @@ public class HttpFetcherImpl implements HttpFetcher {

     private Optional<SimpleRobotRules> fetchAndParseRobotsTxt(EdgeUrl url, WarcRecorder recorder) {
         try {
-            var getBuilder = new Request.Builder().get();
+            var getRequest = HttpRequest.newBuilder()
+                    .GET()
+                    .uri(url.asURI())
+                    .header("Accept-Encoding", "gzip")
+                    .header("Accept", "text/*, */*;q=0.9")
+                    .header("User-agent", userAgentString)
+                    .timeout(requestTimeout);

-            getBuilder.url(url.toString())
-                    .addHeader("Accept-Encoding", "gzip")
-                    .addHeader("Accept", "text/*, */*;q=0.9")
-                    .addHeader("User-agent", userAgentString);
-
-            HttpFetchResult result = recorder.fetch(client, getBuilder.build());
+            HttpFetchResult result = recorder.fetch(client, getRequest.build());

             return DocumentBodyExtractor.asBytes(result).mapOpt((contentType, body) ->
                     robotsParser.parseContent(url.toString(),

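The hunks above all make the same substitution: OkHttp's Request.Builder, newCall and execute are replaced by java.net.http's HttpRequest and HttpClient.send. A minimal, self-contained sketch of the new idiom follows; the URL and user-agent string are placeholder values rather than anything from the codebase, and method("HEAD", ...) is used in place of the JDK 18+ .HEAD() convenience the patch relies on:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

class HeadProbeDemo {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newBuilder()
                .followRedirects(HttpClient.Redirect.NORMAL)  // like okhttp's followRedirects(true)
                .connectTimeout(Duration.ofSeconds(8))
                .build();

        // HEAD probe: discard the body, inspect only the status and the final URI
        HttpRequest head = HttpRequest.newBuilder()
                .method("HEAD", HttpRequest.BodyPublishers.noBody())
                .uri(URI.create("https://example.com/"))      // placeholder URL
                .header("User-agent", "demo-agent")           // placeholder user agent
                .timeout(Duration.ofSeconds(10))
                .build();

        HttpResponse<Void> rsp = client.send(head, HttpResponse.BodyHandlers.discarding());
        System.out.println(rsp.statusCode() + " " + rsp.uri()); // rsp.uri() reflects redirects
    }
}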
@@ -1,31 +0,0 @@
-package nu.marginalia.crawl.fetcher.socket;
-
-import okhttp3.Interceptor;
-import okhttp3.Response;
-import org.jetbrains.annotations.NotNull;
-
-import java.io.IOException;
-
-
-/** An interceptor that intercepts network requests and adds the remote IP address as
- * a header in the response. This is used to pass the remote IP address to the Warc
- * writer, as this information is not available in the response.
- */
-public class IpInterceptingNetworkInterceptor implements Interceptor {
-    private static final String pseudoHeaderName = "X-Marginalia-Remote-IP";
-
-    @NotNull
-    @Override
-    public Response intercept(@NotNull Interceptor.Chain chain) throws IOException {
-        String IP = chain.connection().socket().getInetAddress().getHostAddress();
-
-        return chain.proceed(chain.request())
-                .newBuilder()
-                .addHeader(pseudoHeaderName, IP)
-                .build();
-    }
-
-    public static String getIpFromResponse(Response response) {
-        return response.header(pseudoHeaderName);
-    }
-}
@@ -27,7 +27,7 @@ public class NoSecuritySSL {
         }
     };

-    public static SSLSocketFactory buildSocketFactory() {
+    public static SSLContext buildSslContext() {
         try {
             // Install the all-trusting trust manager
             final SSLContext sslContext = SSLContext.getInstance("TLS");
@@ -40,14 +40,11 @@ public class NoSecuritySSL {
             clientSessionContext.setSessionCacheSize(2048);

             // Create a ssl socket factory with our all-trusting manager
-            return sslContext.getSocketFactory();
+            return sslContext;
         }
         catch (Exception e) {
             throw new RuntimeException(e);
         }
     }
-
-    public static HostnameVerifier buildHostnameVerifyer() {
-        return (hn, session) -> true;
-    }
 }

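java.net.http takes an SSLContext directly on the client builder, which is why buildSocketFactory() becomes buildSslContext() and the separate hostname verifier is dropped; HttpClient exposes no per-client verifier hook (relaxing hostname checks for it is normally done with the jdk.internal.httpclient.disableHostnameVerification system property). A minimal sketch of wiring a trust-all context of this kind into a client, for crawler-style use only and never where authenticity matters:

import java.net.http.HttpClient;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

class TrustAllSslDemo {
    // A trust-all context of the kind NoSecuritySSL builds
    static SSLContext trustAllContext() throws Exception {
        TrustManager[] trustAll = { new X509TrustManager() {
            public X509Certificate[] getAcceptedIssuers() { return new X509Certificate[0]; }
            public void checkClientTrusted(X509Certificate[] chain, String authType) {}
            public void checkServerTrusted(X509Certificate[] chain, String authType) {}
        }};
        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(null, trustAll, new SecureRandom());
        return ctx;
    }

    public static void main(String[] args) throws Exception {
        // HttpClient takes the context directly; no socket factory or verifier plumbing
        HttpClient client = HttpClient.newBuilder()
                .sslContext(trustAllContext())
                .build();
        System.out.println(client.sslContext().getProtocol());
    }
}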
@@ -1,14 +1,14 @@
 package nu.marginalia.crawl.fetcher.warc;

-import okhttp3.Headers;
-import okhttp3.Response;
 import org.apache.commons.io.input.BOMInputStream;
 import org.netpreserve.jwarc.WarcTruncationReason;

 import java.io.*;
+import java.net.http.HttpHeaders;
+import java.net.http.HttpResponse;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.Objects;
+import java.util.Map;
 import java.util.zip.GZIPInputStream;

 /** Input buffer for temporary storage of a HTTP response
@@ -17,8 +17,9 @@ import java.util.zip.GZIPInputStream;
  * */
 public abstract class WarcInputBuffer implements AutoCloseable {
     protected WarcTruncationReason truncationReason = WarcTruncationReason.NOT_TRUNCATED;
-    protected Headers headers;
-    WarcInputBuffer(Headers headers) {
+    protected HttpHeaders headers;
+
+    WarcInputBuffer(HttpHeaders headers) {
         this.headers = headers;
     }

@@ -30,7 +31,7 @@ public abstract class WarcInputBuffer implements AutoCloseable {

     public final WarcTruncationReason truncationReason() { return truncationReason; }

-    public final Headers headers() { return headers; }
+    public final HttpHeaders headers() { return headers; }

     /** Create a buffer for a response.
      * If the response is small and not compressed, it will be stored in memory.
@@ -38,26 +39,27 @@ public abstract class WarcInputBuffer implements AutoCloseable {
     * and suppressed from the headers.
     * If an error occurs, a buffer will be created with no content and an error status.
     */
-    static WarcInputBuffer forResponse(Response rsp) {
+    static WarcInputBuffer forResponse(HttpResponse<InputStream> rsp) {
         if (rsp == null)
             return new ErrorBuffer();

-        try {
-            String contentLengthHeader = Objects.requireNonNullElse(rsp.header("Content-Length"), "-1");
-            int contentLength = Integer.parseInt(contentLengthHeader);
-            String contentEncoding = rsp.header("Content-Encoding");
+        var headers = rsp.headers();
+
+        try (var is = rsp.body()) {
+            int contentLength = (int) headers.firstValueAsLong("Content-Length").orElse(-1L);
+            String contentEncoding = headers.firstValue("Content-Encoding").orElse(null);

             if (contentEncoding == null && contentLength > 0 && contentLength < 8192) {
                 // If the content is small and not compressed, we can just read it into memory
-                return new MemoryBuffer(rsp, contentLength);
+                return new MemoryBuffer(headers, is, contentLength);
             }
             else {
                 // Otherwise, we unpack it into a file and read it from there
-                return new FileBuffer(rsp);
+                return new FileBuffer(headers, is);
             }
         }
         catch (Exception ex) {
-            return new ErrorBuffer(rsp);
+            return new ErrorBuffer();
         }

     }
@@ -99,12 +101,8 @@ public abstract class WarcInputBuffer implements AutoCloseable {
 /** Pseudo-buffer for when we have an error */
 class ErrorBuffer extends WarcInputBuffer {
     public ErrorBuffer() {
-        super(Headers.of());
+        super(HttpHeaders.of(Map.of(), (k,v)->false));
         truncationReason = WarcTruncationReason.UNSPECIFIED;
     }
-
-    public ErrorBuffer(Response rsp) {
-        super(rsp.headers());
-        truncationReason = WarcTruncationReason.UNSPECIFIED;
-    }

@@ -125,12 +123,12 @@ class ErrorBuffer extends WarcInputBuffer {
 /** Buffer for when we have the response in memory */
 class MemoryBuffer extends WarcInputBuffer {
     byte[] data;
-    public MemoryBuffer(Response response, int size) {
-        super(response.headers());
+    public MemoryBuffer(HttpHeaders headers, InputStream responseStream, int size) {
+        super(headers);

         var outputStream = new ByteArrayOutputStream(size);

-        copy(response.body().byteStream(), outputStream);
+        copy(responseStream, outputStream);

         data = outputStream.toByteArray();
     }
@@ -154,19 +152,15 @@ class MemoryBuffer extends WarcInputBuffer {
 class FileBuffer extends WarcInputBuffer {
     private final Path tempFile;

-    public FileBuffer(Response response) throws IOException {
-        super(suppressContentEncoding(response.headers()));
+    public FileBuffer(HttpHeaders headers, InputStream responseStream) throws IOException {
+        super(suppressContentEncoding(headers));

         this.tempFile = Files.createTempFile("rsp", ".html");

-        if (response.body() == null) {
-            truncationReason = WarcTruncationReason.DISCONNECT;
-            return;
-        }
-
-        if ("gzip".equals(response.header("Content-Encoding"))) {
+        if ("gzip".equalsIgnoreCase(headers.firstValue("Content-Encoding").orElse(""))) {
             try (var out = Files.newOutputStream(tempFile)) {
-                copy(new GZIPInputStream(response.body().byteStream()), out);
+                copy(new GZIPInputStream(responseStream), out);
             }
             catch (Exception ex) {
                 truncationReason = WarcTruncationReason.UNSPECIFIED;
@@ -174,7 +168,7 @@ class FileBuffer extends WarcInputBuffer {
         }
         else {
             try (var out = Files.newOutputStream(tempFile)) {
-                copy(response.body().byteStream(), out);
+                copy(responseStream, out);
             }
             catch (Exception ex) {
                 truncationReason = WarcTruncationReason.UNSPECIFIED;
@@ -182,22 +176,13 @@ class FileBuffer extends WarcInputBuffer {
             }
         }
     }

-    private static Headers suppressContentEncoding(Headers headers) {
-        var builder = new Headers.Builder();
-
-        headers.toMultimap().forEach((k, values) -> {
+    private static HttpHeaders suppressContentEncoding(HttpHeaders headers) {
+        return HttpHeaders.of(headers.map(), (k, v) -> {
             if ("Content-Encoding".equalsIgnoreCase(k)) {
-                return;
+                return false;
             }
-            if ("Transfer-Encoding".equalsIgnoreCase(k)) {
-                return;
-            }
-            for (var value : values) {
-                builder.add(k, value);
-            }
+            return !"Transfer-Encoding".equalsIgnoreCase(k);
         });
-
-        return builder.build();
     }

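The suppressContentEncoding rewrite leans on the fact that HttpHeaders.of takes a BiPredicate that decides, per (name, value) pair, which headers survive, replacing okhttp's manual Headers.Builder loop. A small self-contained illustration; the header values are made up:

import java.net.http.HttpHeaders;
import java.util.List;
import java.util.Map;

class HeaderFilterDemo {
    public static void main(String[] args) {
        HttpHeaders original = HttpHeaders.of(Map.of(
                "Content-Type", List.of("text/html"),
                "Content-Encoding", List.of("gzip"),
                "Transfer-Encoding", List.of("chunked")), (k, v) -> true);

        // Same idea as suppressContentEncoding: the predicate drops the
        // encoding headers, since the buffer stores the decoded body
        HttpHeaders filtered = HttpHeaders.of(original.map(), (k, v) ->
                !"Content-Encoding".equalsIgnoreCase(k)
             && !"Transfer-Encoding".equalsIgnoreCase(k));

        System.out.println(filtered.map()); // only Content-Type survives
    }
}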
@@ -1,11 +1,12 @@
 package nu.marginalia.crawl.fetcher.warc;

-import okhttp3.Protocol;
-import okhttp3.Response;
 import org.apache.commons.lang3.StringUtils;

 import java.net.URI;
 import java.net.URLEncoder;
+import java.net.http.HttpClient;
+import java.net.http.HttpHeaders;
+import java.net.http.HttpResponse;
 import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.util.stream.Collectors;
@@ -75,13 +76,13 @@ public class WarcProtocolReconstructor {
         return "HTTP/" + version + " " + statusCode + " " + statusMessage + "\r\n" + headerString + "\r\n\r\n";
     }

-    static String getResponseHeader(Response response, long size) {
-        String version = response.protocol() == Protocol.HTTP_1_1 ? "1.1" : "2.0";
+    static String getResponseHeader(HttpResponse<?> response, long size) {
+        String version = response.version() == HttpClient.Version.HTTP_1_1 ? "1.1" : "2.0";

-        String statusCode = String.valueOf(response.code());
-        String statusMessage = STATUS_CODE_MAP.getOrDefault(response.code(), "Unknown");
+        String statusCode = String.valueOf(response.statusCode());
+        String statusMessage = STATUS_CODE_MAP.getOrDefault(response.statusCode(), "Unknown");

-        String headerString = getHeadersAsString(response, size);
+        String headerString = getHeadersAsString(response.headers(), size);

         return "HTTP/" + version + " " + statusCode + " " + statusMessage + "\r\n" + headerString + "\r\n\r\n";
     }
@@ -148,10 +149,10 @@ public class WarcProtocolReconstructor {
         return joiner.toString();
     }

-    static private String getHeadersAsString(Response response, long responseSize) {
+    static private String getHeadersAsString(HttpHeaders headers, long responseSize) {
         StringJoiner joiner = new StringJoiner("\r\n");

-        response.headers().toMultimap().forEach((k, values) -> {
+        headers.map().forEach((k, values) -> {
             String headerCapitalized = capitalizeHeader(k);

             // Omit pseudoheaders injected by the crawler itself
@@ -179,8 +180,8 @@ public class WarcProtocolReconstructor {
         return joiner.toString();
     }

-    // okhttp gives us flattened headers, so we need to reconstruct Camel-Kebab-Case style
-    // for the WARC parser's sake...
+    // okhttp gave us flattened headers, so we need to reconstruct Camel-Kebab-Case style
+    // for the WARC parser's sake... (do we still need this, mr chesterton?)
     static private String capitalizeHeader(String k) {
         return Arrays.stream(StringUtils.split(k, '-'))
                 .map(StringUtils::capitalize)

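The comment above wonders whether the Camel-Kebab-Case reconstruction is still needed; either way, the logic just splits the header name on '-' and capitalizes each token. An equivalent sketch without the commons-lang3 dependency:

import java.util.Arrays;
import java.util.stream.Collectors;

class HeaderCaseDemo {
    // Same effect as the capitalizeHeader in the patch: split on '-',
    // upper-case the first letter of each token, rejoin with '-'
    static String capitalizeHeader(String k) {
        return Arrays.stream(k.split("-"))
                .map(s -> s.isEmpty() ? s : Character.toUpperCase(s.charAt(0)) + s.substring(1))
                .collect(Collectors.joining("-"));
    }

    public static void main(String[] args) {
        System.out.println(capitalizeHeader("content-type"));   // Content-Type
        System.out.println(capitalizeHeader("x-has-cookies"));  // X-Has-Cookies
    }
}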
@@ -1,13 +1,11 @@
 package nu.marginalia.crawl.fetcher.warc;

 import nu.marginalia.crawl.fetcher.ContentTags;
+import nu.marginalia.crawl.fetcher.Cookies;
 import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
-import nu.marginalia.crawl.fetcher.socket.IpInterceptingNetworkInterceptor;
 import nu.marginalia.model.EdgeDomain;
 import nu.marginalia.model.EdgeUrl;
 import nu.marginalia.model.body.HttpFetchResult;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
 import org.jetbrains.annotations.Nullable;
 import org.netpreserve.jwarc.*;
 import org.slf4j.Logger;
@@ -18,24 +16,27 @@ import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.http.HttpClient;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.NoSuchAlgorithmException;
+import java.time.Duration;
 import java.time.Instant;
 import java.util.*;

 /** Based on JWarc's fetch method, APL 2.0 license
  * <p></p>
- * This class wraps OkHttp's OkHttpClient and records the HTTP request and response in a WARC file,
+ * This class wraps HttpClient and records the HTTP request and response in a WARC file,
  * as best is possible given not all the data is available at the same time and needs to
  * be reconstructed.
  */
 public class WarcRecorder implements AutoCloseable {
     /** Maximum time we'll wait on a single request */
     static final int MAX_TIME = 30_000;
-    /** Maximum (decompressed) size we'll fetch */
-    static final int MAX_SIZE = 1024 * 1024 * 10;
+
+    /** Maximum (decompressed) size we'll save */
+    static final int MAX_SIZE = Integer.getInteger("crawler.maxFetchSize", 10 * 1024 * 1024);

     private final WarcWriter writer;
     private final Path warcFile;
@@ -46,20 +47,22 @@ public class WarcRecorder implements AutoCloseable {
     // Affix a version string in case we need to change the format in the future
     // in some way
     private final String warcRecorderVersion = "1.0";

-    // We need to know if the site uses cookies so this can be reported among the search results
-    // -- flip this to true if we see any cookies. This information will also be painted on any
-    // revisited pages. It's not 100% perfect and a bit order dependent, but it's good enough.
-    private final WarcXCookieInformationHeader cookieInformation = new WarcXCookieInformationHeader();
+    private final Cookies cookies;

     /**
      * Create a new WarcRecorder that will write to the given file
      *
      * @param warcFile The file to write to
      */
-    public WarcRecorder(Path warcFile) throws IOException {
+    public WarcRecorder(Path warcFile, HttpFetcherImpl fetcher) throws IOException {
         this.warcFile = warcFile;
         this.writer = new WarcWriter(warcFile);
+        this.cookies = fetcher.getCookies();
+    }
+
+    public WarcRecorder(Path warcFile, Cookies cookies) throws IOException {
+        this.warcFile = warcFile;
+        this.writer = new WarcWriter(warcFile);
+        this.cookies = cookies;
     }

     /**
@@ -69,37 +72,41 @@ public class WarcRecorder implements AutoCloseable {
     public WarcRecorder() throws IOException {
         this.warcFile = Files.createTempFile("warc", ".warc.gz");
         this.writer = new WarcWriter(this.warcFile);
+        this.cookies = new Cookies();

         temporaryFile = true;
     }

-    public HttpFetchResult fetch(OkHttpClient client, Request request) throws NoSuchAlgorithmException,
-            IOException,
-            URISyntaxException,
-            InterruptedException
+    public HttpFetchResult fetch(HttpClient client,
+                                 java.net.http.HttpRequest request)
+            throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException
     {
-        URI requestUri = request.url().uri();
+        URI requestUri = request.uri();

         WarcDigestBuilder responseDigestBuilder = new WarcDigestBuilder();
         WarcDigestBuilder payloadDigestBuilder = new WarcDigestBuilder();

-        String ip;
         Instant date = Instant.now();

-        var call = client.newCall(request);
+        var response = client.send(request, java.net.http.HttpResponse.BodyHandlers.ofInputStream());

-        cookieInformation.update(client, request.url());
-
-        try (var response = call.execute();
-             WarcInputBuffer inputBuffer = WarcInputBuffer.forResponse(response))
+        Map<String, List<String>> extraHeaders = new HashMap<>();
+
+        // Not entirely sure why we need to do this, but keeping it due to Chesterton's Fence
+        extraHeaders.putAll(request.headers().map());
+
+        try (WarcInputBuffer inputBuffer = WarcInputBuffer.forResponse(response))
         {
+            if (cookies.hasCookies()) {
+                extraHeaders.put("X-Has-Cookies", List.of("1"));
+            }
+
             byte[] responseHeaders = WarcProtocolReconstructor.getResponseHeader(response, inputBuffer.size()).getBytes(StandardCharsets.UTF_8);

             ResponseDataBuffer responseDataBuffer = new ResponseDataBuffer(inputBuffer.size() + responseHeaders.length);
             InputStream inputStream = inputBuffer.read();

-            ip = IpInterceptingNetworkInterceptor.getIpFromResponse(response);
-
             responseDataBuffer.put(responseHeaders);
             responseDataBuffer.updateDigest(responseDigestBuilder, 0, responseHeaders.length);

@@ -122,17 +129,15 @@ public class WarcRecorder implements AutoCloseable {

             // It looks like this might be the same as requestUri, but it's not;
             // it's the URI after resolving redirects.
-            final URI responseUri = response.request().url().uri();
+            final URI responseUri = response.uri();

             WarcResponse.Builder responseBuilder = new WarcResponse.Builder(responseUri)
                     .blockDigest(responseDigestBuilder.build())
                     .date(date)
                     .body(MediaType.HTTP_RESPONSE, responseDataBuffer.copyBytes());

-            cookieInformation.paint(responseBuilder);
-
-            if (ip != null) responseBuilder.ipAddress(InetAddress.getByName(ip));
-
+            InetAddress inetAddress = InetAddress.getByName(responseUri.getHost());
+            responseBuilder.ipAddress(inetAddress);
             responseBuilder.payloadDigest(payloadDigestBuilder.build());
             responseBuilder.truncated(inputBuffer.truncationReason());

@@ -149,8 +154,8 @@ public class WarcRecorder implements AutoCloseable {
             byte[] httpRequestString = WarcProtocolReconstructor
                     .getHttpRequestString(
                             response.request().method(),
-                            response.request().headers().toMultimap(),
-                            request.headers().toMultimap(),
+                            response.request().headers().map(),
+                            extraHeaders,
                             requestUri)
                     .getBytes();

@@ -166,10 +171,29 @@ public class WarcRecorder implements AutoCloseable {
             warcRequest.http(); // force HTTP header to be parsed before body is consumed so that caller can use it
             writer.write(warcRequest);

+            if (Duration.between(date, Instant.now()).compareTo(Duration.ofSeconds(9)) > 0
+                    && inputBuffer.size() < 2048
+                    && !request.uri().getPath().endsWith("robots.txt")) // don't bail on robots.txt
+            {
+                // Fast detection and mitigation of crawler traps that respond with slow
+                // small responses, with a high branching factor
+
+                // Note we bail *after* writing the warc records, this will effectively only
+                // prevent link extraction from the document.
+
+                logger.warn("URL {} took too long to fetch ({}s) and was too small for the effort ({}b)",
+                        requestUri,
+                        Duration.between(date, Instant.now()).getSeconds(),
+                        inputBuffer.size()
+                );
+
+                return new HttpFetchResult.ResultException(new IOException("Likely crawler trap"));
+            }
+
             return new HttpFetchResult.ResultOk(responseUri,
-                    response.code(),
+                    response.statusCode(),
                     inputBuffer.headers(),
-                    ip,
+                    inetAddress.getHostAddress(),
                     responseDataBuffer.data,
                     dataStart,
                     responseDataBuffer.length() - dataStart);
@@ -245,7 +269,9 @@ public class WarcRecorder implements AutoCloseable {
                 .date(Instant.now())
                 .body(MediaType.HTTP_RESPONSE, responseDataBuffer.copyBytes());

-        cookieInformation.paint(builder);
+        if (cookies.hasCookies()) {
+            builder.addHeader("X-Has-Cookies", "1");
+        }

         var reference = builder.build();

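The new bail-out in fetch() treats a response that took more than nine seconds yet came back under 2 KiB as a likely crawler trap, unless it is robots.txt. The condition is easy to test in isolation; a sketch with made-up inputs:

import java.time.Duration;
import java.time.Instant;

class CrawlerTrapHeuristicDemo {
    // Mirrors the condition in the patch: slow AND tiny AND not robots.txt
    static boolean looksLikeTrap(Instant started, int bodySize, String path) {
        return Duration.between(started, Instant.now()).compareTo(Duration.ofSeconds(9)) > 0
                && bodySize < 2048
                && !path.endsWith("robots.txt");
    }

    public static void main(String[] args) {
        Instant t0 = Instant.now().minusSeconds(10); // pretend the fetch took 10 seconds
        System.out.println(looksLikeTrap(t0, 512, "/calendar?year=1997")); // true
        System.out.println(looksLikeTrap(t0, 512, "/robots.txt"));         // false
    }
}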
@@ -12,7 +12,6 @@ import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
 import nu.marginalia.crawl.logic.LinkFilterSelector;
 import nu.marginalia.crawl.retreival.revisit.CrawlerRevisitor;
 import nu.marginalia.crawl.retreival.revisit.DocumentWithReference;
-import nu.marginalia.crawl.retreival.sitemap.SitemapFetcher;
 import nu.marginalia.ip_blocklist.UrlBlocklist;
 import nu.marginalia.link_parser.LinkParser;
 import nu.marginalia.model.EdgeDomain;
@@ -53,7 +52,6 @@ public class CrawlerRetreiver implements AutoCloseable {
     private final WarcRecorder warcRecorder;
     private final CrawlerRevisitor crawlerRevisitor;

-    private final SitemapFetcher sitemapFetcher;
     int errorCount = 0;

     public CrawlerRetreiver(HttpFetcher fetcher,
@@ -71,7 +69,6 @@ public class CrawlerRetreiver implements AutoCloseable {

         crawlFrontier = new DomainCrawlFrontier(new EdgeDomain(domain), specs.urls(), specs.crawlDepth());
         crawlerRevisitor = new CrawlerRevisitor(crawlFrontier, this, warcRecorder);
-        sitemapFetcher = new SitemapFetcher(crawlFrontier, fetcher.createSitemapRetriever());

         // We must always crawl the index page first, this is assumed when fingerprinting the server
         var fst = crawlFrontier.peek();
@@ -145,9 +142,11 @@ public class CrawlerRetreiver implements AutoCloseable {
         // Add external links to the crawl frontier
         crawlFrontier.addAllToQueue(domainLinks.getUrls(rootUrl.proto));

-        // Add links from the sitemap to the crawl frontier
-        sitemapFetcher.downloadSitemaps(robotsRules, rootUrl);
-
+        // Fetch sitemaps
+        for (var sitemap : robotsRules.getSitemaps()) {
+            crawlFrontier.addAllToQueue(fetcher.fetchSitemapUrls(sitemap, delayTimer));
+        }

         while (!crawlFrontier.isEmpty()
                 && !crawlFrontier.isCrawlDepthReached()
@@ -271,10 +270,7 @@ public class CrawlerRetreiver implements AutoCloseable {
         }

         // Download the sitemap if available
-        if (feedLink.isPresent()) {
-            sitemapFetcher.downloadSitemaps(List.of(feedLink.get()));
-            timer.waitFetchDelay(0);
-        }
+        feedLink.ifPresent(s -> fetcher.fetchSitemapUrls(s, timer));

         // Grab the favicon if it exists
         fetchWithRetry(faviconUrl, timer, HttpFetcher.ProbeType.DISABLED, ContentTags.empty());

@@ -1,72 +0,0 @@
-package nu.marginalia.crawl.retreival.sitemap;
-
-import crawlercommons.robots.SimpleRobotRules;
-import nu.marginalia.crawl.fetcher.SitemapRetriever;
-import nu.marginalia.crawl.retreival.DomainCrawlFrontier;
-import nu.marginalia.model.EdgeUrl;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Optional;
-import java.util.Set;
-
-public class SitemapFetcher {
-
-    private final DomainCrawlFrontier crawlFrontier;
-    private final SitemapRetriever sitemapRetriever;
-    private static final Logger logger = LoggerFactory.getLogger(SitemapFetcher.class);
-
-    public SitemapFetcher(DomainCrawlFrontier crawlFrontier, SitemapRetriever sitemapRetriever) {
-        this.crawlFrontier = crawlFrontier;
-        this.sitemapRetriever = sitemapRetriever;
-    }
-
-    public void downloadSitemaps(SimpleRobotRules robotsRules, EdgeUrl rootUrl) {
-        List<String> urls = robotsRules.getSitemaps();
-
-        if (urls.isEmpty()) {
-            urls = List.of(rootUrl.withPathAndParam("/sitemap.xml", null).toString());
-        }
-
-        downloadSitemaps(urls);
-    }
-
-    public void downloadSitemaps(List<String> urls) {
-
-        Set<String> checkedSitemaps = new HashSet<>();
-
-        for (var rawUrl : urls) {
-            Optional<EdgeUrl> parsedUrl = EdgeUrl.parse(rawUrl);
-            if (parsedUrl.isEmpty()) {
-                continue;
-            }
-
-            EdgeUrl url = parsedUrl.get();
-
-            // Let's not download sitemaps from other domains for now
-            if (!crawlFrontier.isSameDomain(url)) {
-                continue;
-            }
-
-            if (checkedSitemaps.contains(url.path))
-                continue;
-
-            var sitemap = sitemapRetriever.fetchSitemap(url);
-            if (sitemap.isEmpty()) {
-                continue;
-            }
-
-            // ensure we don't try to download this sitemap again
-            // (don't move this up, as we may want to check the same
-            // path with different protocols until we find one that works)
-
-            checkedSitemaps.add(url.path);
-
-            crawlFrontier.addAllToQueue(sitemap);
-        }
-
-        logger.debug("Queue is now {}", crawlFrontier.queueSize());
-    }
-}
@@ -36,7 +36,6 @@ dependencies {
     implementation libs.gson
     implementation libs.commons.io
     implementation libs.commons.lang3
-    implementation libs.okhttp3
     implementation libs.jsoup
     implementation libs.snakeyaml
     implementation libs.zstd

@@ -1,17 +1,17 @@
 package nu.marginalia.model.body;

 import nu.marginalia.contenttype.ContentType;
-import okhttp3.Headers;
 import org.jetbrains.annotations.Nullable;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.netpreserve.jwarc.MessageHeaders;
 import org.netpreserve.jwarc.WarcResponse;

 import java.io.ByteArrayInputStream;
-import java.io.IOException;
 import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.URI;
+import java.net.http.HttpHeaders;
 import java.util.Optional;

 /* FIXME: This interface has a very unfortunate name that is not very descriptive.
@@ -56,42 +56,26 @@ public sealed interface HttpFetchResult {
      */
     record ResultOk(URI uri,
                     int statusCode,
-                    Headers headers,
+                    HttpHeaders headers,
                     String ipAddress,
                     byte[] bytesRaw,
                     int bytesStart,
                     int bytesLength
     ) implements HttpFetchResult {

+        public ResultOk(URI uri, int status, MessageHeaders headers, String ipAddress, byte[] bytes, int bytesStart, int length) {
+            this(uri, status, HttpHeaders.of(headers.map(), (k,v) -> true), ipAddress, bytes, bytesStart, length);
+        }
+
         public boolean isOk() {
             return statusCode >= 200 && statusCode < 300;
         }

-        public ResultOk(URI uri,
-                        int statusCode,
-                        MessageHeaders headers,
-                        String ipAddress,
-                        byte[] bytesRaw,
-                        int bytesStart,
-                        int bytesLength) {
-            this(uri, statusCode, convertHeaders(headers), ipAddress, bytesRaw, bytesStart, bytesLength);
-        }
-
-        private static Headers convertHeaders(MessageHeaders headers) {
-            var ret = new Headers.Builder();
-            for (var header : headers.map().entrySet()) {
-                for (var value : header.getValue()) {
-                    ret.add(header.getKey(), value);
-                }
-            }
-            return ret.build();
-        }
-
         public InputStream getInputStream() {
             return new ByteArrayInputStream(bytesRaw, bytesStart, bytesLength);
         }

-        public Optional<Document> parseDocument() throws IOException {
+        public Optional<Document> parseDocument() {
             return DocumentBodyExtractor.asString(this).flatMapOpt((contentType, body) -> {
                 if (contentType.is("text/html")) {
                     return Optional.of(Jsoup.parse(body));
@@ -102,8 +86,9 @@ public sealed interface HttpFetchResult {
             });
         }

+        @Nullable
         public String header(String name) {
-            return headers.get(name);
+            return headers.firstValue(name).orElse(null);
         }

     }

@@ -165,27 +165,28 @@ public class CrawledDocumentParquetRecordFileWriter implements AutoCloseable {
             contentType = "";
         }

-        String headersStr = null;
         StringJoiner headersStrBuilder = new StringJoiner("\n");
-        for (var header : headers) {
-            headersStrBuilder.add(header.getFirst() + ": " + header.getSecond());
+        for (var header : headers.map().entrySet()) {
+            for (var value : header.getValue()) {
+                headersStrBuilder.add(header.getKey() + ": " + value);
+            }
         }
-        headersStr = headersStrBuilder.toString();
+        String headersStr = headersStrBuilder.toString();


         write(new CrawledDocumentParquetRecord(
                 domain,
                 response.target(),
                 fetchOk.ipAddress(),
-                WarcXCookieInformationHeader.hasCookies(response),
+                headers.firstValue("X-Has-Cookies").orElse("0").equals("1"),
                 fetchOk.statusCode(),
                 response.date(),
                 contentType,
                 bodyBytes,
                 headersStr,
-                headers.get("ETag"),
-                headers.get("Last-Modified"))
-        );
+                headers.firstValue("ETag").orElse(null),
+                headers.firstValue("Last-Modified").orElse(null)
+        ));
     }

@@ -1,35 +0,0 @@
-package org.netpreserve.jwarc;
-
-import okhttp3.HttpUrl;
-import okhttp3.OkHttpClient;
-
-/** Encapsulates out-of-band information about whether a website uses cookies,
- * using a non-standard WARC header "X-Has-Cookies".
- */
-public class WarcXCookieInformationHeader {
-    private boolean hasCookies = false;
-    private static final String headerName = "X-Has-Cookies";
-
-    public void update(OkHttpClient client, HttpUrl url) {
-        if (!hasCookies) {
-            hasCookies = !client.cookieJar().loadForRequest(url).isEmpty();
-        }
-    }
-
-    public boolean hasCookies() {
-        return hasCookies;
-    }
-
-    public void paint(WarcResponse.Builder builder) {
-        builder.addHeader(headerName, hasCookies ? "1" : "0");
-    }
-    public void paint(WarcXResponseReference.Builder builder) {
-        builder.addHeader(headerName, hasCookies ? "1" : "0");
-    }
-
-    public static boolean hasCookies(WarcRecord record) {
-        return record.headers().contains(headerName, "1");
-    }
-
-}
@@ -1,11 +1,9 @@
 package nu.marginalia.crawl.retreival;

-import nu.marginalia.crawl.fetcher.socket.IpInterceptingNetworkInterceptor;
+import nu.marginalia.crawl.fetcher.Cookies;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
 import nu.marginalia.model.EdgeDomain;
 import nu.marginalia.model.EdgeUrl;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -15,6 +13,8 @@ import org.netpreserve.jwarc.WarcResponse;

 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.NoSuchAlgorithmException;
@@ -27,11 +27,10 @@ import static org.junit.jupiter.api.Assertions.fail;
 class CrawlerWarcResynchronizerTest {
     Path fileName;
     Path outputFile;
-    OkHttpClient httpClient;
+    HttpClient httpClient;
     @BeforeEach
     public void setUp() throws Exception {
-        httpClient = new OkHttpClient.Builder()
-                .addNetworkInterceptor(new IpInterceptingNetworkInterceptor())
+        httpClient = HttpClient.newBuilder()
                 .build();

         fileName = Files.createTempFile("test", ".warc.gz");
@@ -46,7 +45,7 @@ class CrawlerWarcResynchronizerTest {

     @Test
     void run() throws IOException, URISyntaxException {
-        try (var oldRecorder = new WarcRecorder(fileName)) {
+        try (var oldRecorder = new WarcRecorder(fileName, new Cookies())) {
             fetchUrl(oldRecorder, "https://www.marginalia.nu/");
             fetchUrl(oldRecorder, "https://www.marginalia.nu/log/");
             fetchUrl(oldRecorder, "https://www.marginalia.nu/feed/");
@@ -56,7 +55,7 @@ class CrawlerWarcResynchronizerTest {

         var crawlFrontier = new DomainCrawlFrontier(new EdgeDomain("www.marginalia.nu"), List.of(), 100);

-        try (var newRecorder = new WarcRecorder(outputFile)) {
+        try (var newRecorder = new WarcRecorder(outputFile, new Cookies())) {
             new CrawlerWarcResynchronizer(crawlFrontier, newRecorder).run(fileName);
         }

@@ -79,10 +78,11 @@ class CrawlerWarcResynchronizerTest {
     }

     void fetchUrl(WarcRecorder recorder, String url) throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException {
-        var req = new Request.Builder().url(url)
-                .addHeader("User-agent", "test.marginalia.nu")
-                .addHeader("Accept-Encoding", "gzip")
-                .get().build();
+        var req = HttpRequest.newBuilder()
+                .uri(new java.net.URI(url))
+                .header("User-agent", "test.marginalia.nu")
+                .header("Accept-Encoding", "gzip")
+                .GET().build();
         recorder.fetch(httpClient, req);
     }
 }
@@ -2,6 +2,7 @@ package nu.marginalia.crawl.retreival.fetcher;

 import com.sun.net.httpserver.HttpServer;
 import nu.marginalia.crawl.fetcher.ContentTags;
+import nu.marginalia.crawl.fetcher.Cookies;
 import nu.marginalia.crawl.fetcher.HttpFetcher;
 import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
@@ -79,7 +80,7 @@ class ContentTypeProberTest {
         htmlRedirEndpoint = EdgeUrl.parse("http://localhost:" + port + "/redir.gz").get();

         fetcher = new HttpFetcherImpl("test");
-        recorder = new WarcRecorder(warcFile);
+        recorder = new WarcRecorder(warcFile, new Cookies());
     }

     @AfterEach

@@ -2,13 +2,11 @@ package nu.marginalia.crawl.retreival.fetcher;

 import nu.marginalia.UserAgent;
 import nu.marginalia.crawl.fetcher.ContentTags;
-import nu.marginalia.crawl.fetcher.socket.IpInterceptingNetworkInterceptor;
+import nu.marginalia.crawl.fetcher.Cookies;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
 import nu.marginalia.model.EdgeUrl;
 import nu.marginalia.parquet.crawldata.CrawledDocumentParquetRecordFileReader;
 import nu.marginalia.parquet.crawldata.CrawledDocumentParquetRecordFileWriter;
-import okhttp3.OkHttpClient;
-import okhttp3.Request;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -19,6 +17,8 @@ import org.netpreserve.jwarc.WarcXResponseReference;

 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.security.NoSuchAlgorithmException;
@@ -31,17 +31,16 @@ class WarcRecorderTest {
     Path fileNameWarc;
     Path fileNameParquet;
     WarcRecorder client;
-    OkHttpClient httpClient;

+    HttpClient httpClient;
     @BeforeEach
     public void setUp() throws Exception {
-        httpClient = new OkHttpClient.Builder()
-                .addNetworkInterceptor(new IpInterceptingNetworkInterceptor())
-                .build();
+        httpClient = HttpClient.newBuilder().build();

         fileNameWarc = Files.createTempFile("test", ".warc");
         fileNameParquet = Files.createTempFile("test", ".parquet");

-        client = new WarcRecorder(fileNameWarc);
+        client = new WarcRecorder(fileNameWarc, new Cookies());
     }

     @AfterEach
@@ -52,10 +51,13 @@ class WarcRecorderTest {

     @Test
     void fetch() throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException {
-        client.fetch(httpClient, new Request.Builder().url("https://www.marginalia.nu/")
-                .addHeader("User-agent", "test.marginalia.nu")
-                .addHeader("Accept-Encoding", "gzip")
-                .get().build());
+        client.fetch(httpClient,
+                HttpRequest.newBuilder()
+                        .uri(new java.net.URI("https://www.marginalia.nu/"))
+                        .header("User-agent", "test.marginalia.nu")
+                        .header("Accept-Encoding", "gzip")
+                        .GET().build()
+        );

         Map<String, String> sampleData = new HashMap<>();
         try (var warcReader = new WarcReader(fileNameWarc)) {
@@ -76,7 +78,7 @@ class WarcRecorderTest {
     @Test
     public void flagAsSkipped() throws IOException, URISyntaxException {

-        try (var recorder = new WarcRecorder(fileNameWarc)) {
+        try (var recorder = new WarcRecorder(fileNameWarc, new Cookies())) {
             recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
                     "text/html",
                     200,
@@ -100,7 +102,7 @@ class WarcRecorderTest {
     @Test
     public void flagAsSkippedNullBody() throws IOException, URISyntaxException {

-        try (var recorder = new WarcRecorder(fileNameWarc)) {
+        try (var recorder = new WarcRecorder(fileNameWarc, new Cookies())) {
             recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
                     "text/html",
                     200,
@@ -112,7 +114,7 @@ class WarcRecorderTest {

     @Test
     public void testSaveImport() throws URISyntaxException, IOException {
-        try (var recorder = new WarcRecorder(fileNameWarc)) {
+        try (var recorder = new WarcRecorder(fileNameWarc, new Cookies())) {
             recorder.writeReferenceCopy(new EdgeUrl("https://www.marginalia.nu/"),
                     "text/html",
                     200,
@@ -136,19 +138,23 @@ class WarcRecorderTest {

     @Test
     public void testConvertToParquet() throws NoSuchAlgorithmException, IOException, URISyntaxException, InterruptedException {
-        client.fetch(httpClient, new Request.Builder().url("https://www.marginalia.nu/")
-                .addHeader("User-agent", "test.marginalia.nu")
-                .addHeader("Accept-Encoding", "gzip")
-                .get().build());
-        client.fetch(httpClient, new Request.Builder().url("https://www.marginalia.nu/log/")
-                .addHeader("User-agent", "test.marginalia.nu")
-                .addHeader("Accept-Encoding", "gzip")
-                .get().build());
-        client.fetch(httpClient, new Request.Builder().url("https://www.marginalia.nu/sanic.png")
-                .addHeader("User-agent", "test.marginalia.nu")
-                .addHeader("Accept-Encoding", "gzip")
-                .get().build());
-        client.close();
+        client.fetch(httpClient, HttpRequest.newBuilder()
+                .uri(new java.net.URI("https://www.marginalia.nu/"))
+                .header("User-agent", "test.marginalia.nu")
+                .header("Accept-Encoding", "gzip")
+                .GET().build());
+
+        client.fetch(httpClient, HttpRequest.newBuilder()
+                .uri(new java.net.URI("https://www.marginalia.nu/log/"))
+                .header("User-agent", "test.marginalia.nu")
+                .header("Accept-Encoding", "gzip")
+                .GET().build());
+
+        client.fetch(httpClient, HttpRequest.newBuilder()
+                .uri(new java.net.URI("https://www.marginalia.nu/sanic.png"))
+                .header("User-agent", "test.marginalia.nu")
+                .header("Accept-Encoding", "gzip")
+                .GET().build());

         CrawledDocumentParquetRecordFileWriter.convertWarc(
                 "www.marginalia.nu",

@@ -4,6 +4,7 @@ import nu.marginalia.crawl.fetcher.ContentTags;
 import nu.marginalia.crawl.fetcher.HttpFetcher;
 import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
+import nu.marginalia.crawl.retreival.CrawlDelayTimer;
 import nu.marginalia.model.EdgeUrl;
 import nu.marginalia.model.body.ContentTypeLogic;
 import nu.marginalia.model.body.DocumentBodyExtractor;
@@ -37,6 +38,12 @@ class HttpFetcherTest {
         }
     }

+    @Test
+    void testSitemapMarginalia() {
+        var fetcher = new HttpFetcherImpl("nu.marginalia.edge-crawler");
+        fetcher.fetchSitemapUrls("https://www.marginalia.nu/sitemap.xml", new CrawlDelayTimer(1)).forEach(System.out::println);
+    }
+
     @Test
     void fetchText() throws Exception {
         var fetcher = new HttpFetcherImpl("nu.marginalia.edge-crawler");

@@ -3,11 +3,9 @@ package nu.marginalia.crawling.retreival;
 import crawlercommons.robots.SimpleRobotRules;
 import nu.marginalia.crawl.CrawlerMain;
 import nu.marginalia.crawl.DomainStateDb;
-import nu.marginalia.crawl.fetcher.ContentTags;
-import nu.marginalia.crawl.fetcher.HttpFetcher;
-import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
-import nu.marginalia.crawl.fetcher.SitemapRetriever;
+import nu.marginalia.crawl.fetcher.*;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
+import nu.marginalia.crawl.retreival.CrawlDelayTimer;
 import nu.marginalia.crawl.retreival.CrawlerRetreiver;
 import nu.marginalia.crawl.retreival.DomainProber;
 import nu.marginalia.model.EdgeDomain;
@@ -17,7 +15,6 @@ import nu.marginalia.model.crawldata.CrawledDocument;
 import nu.marginalia.model.crawldata.CrawlerDocumentStatus;
 import nu.marginalia.model.crawldata.SerializableCrawlData;
 import nu.marginalia.test.CommonTestData;
-import okhttp3.Headers;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -27,6 +24,7 @@ import org.slf4j.LoggerFactory;

 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.net.http.HttpHeaders;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.sql.SQLException;
@@ -122,7 +120,7 @@ public class CrawlerMockFetcherTest {
         public void setAllowAllContentTypes(boolean allowAllContentTypes) {}

         @Override
-        public List<String> getCookies() { return List.of();}
+        public Cookies getCookies() { return new Cookies();}

         @Override
         public void clearCookies() {}
@@ -149,7 +147,7 @@ public class CrawlerMockFetcherTest {
             return new HttpFetchResult.ResultOk(
                     url.asURI(),
                     200,
-                    new Headers.Builder().build(),
+                    HttpHeaders.of(Map.of(), (k,v)->true),
                     "127.0.0.1",
                     bodyBytes,
                     0,
@@ -164,6 +162,11 @@ public class CrawlerMockFetcherTest {
             return new HttpFetchResult.ResultNone();
         }

+        @Override
+        public List<EdgeUrl> fetchSitemapUrls(String rootSitemapUrl, CrawlDelayTimer delayTimer) {
+            return List.of();
+        }
+
         @Override
         public SimpleRobotRules fetchRobotRules(EdgeDomain domain, WarcRecorder recorder) {
             return new SimpleRobotRules();
@@ -174,5 +177,9 @@ public class CrawlerMockFetcherTest {
             return Mockito.mock(SitemapRetriever.class);
         }

+        @Override
+        public void close() {
+
+        }
     }
 }

@@ -5,6 +5,7 @@ import nu.marginalia.WmsaHome;
 import nu.marginalia.atags.model.DomainLinks;
 import nu.marginalia.crawl.CrawlerMain;
 import nu.marginalia.crawl.DomainStateDb;
+import nu.marginalia.crawl.fetcher.Cookies;
 import nu.marginalia.crawl.fetcher.HttpFetcher;
 import nu.marginalia.crawl.fetcher.HttpFetcherImpl;
 import nu.marginalia.crawl.fetcher.warc.WarcRecorder;
@@ -180,7 +181,7 @@ class CrawlerRetreiverTest {
                 new EdgeDomain("www.marginalia.nu"),
                 List.of(), 100);
         var resync = new CrawlerWarcResynchronizer(revisitCrawlFrontier,
-                new WarcRecorder(tempFileWarc2)
+                new WarcRecorder(tempFileWarc2, new Cookies())
         );

         // truncate the size of the file to simulate a crash
@@ -458,7 +459,7 @@ class CrawlerRetreiverTest {
                 List.of(), 100);

         var resync = new CrawlerWarcResynchronizer(revisitCrawlFrontier,
-                new WarcRecorder(tempFileWarc3)
+                new WarcRecorder(tempFileWarc3, new Cookies())
         );

         // truncate the size of the file to simulate a crash
@@ -509,7 +510,7 @@ class CrawlerRetreiverTest {
     }

     private void doCrawlWithReferenceStream(CrawlerMain.CrawlSpecRecord specs, SerializableCrawlDataStream stream) {
-        try (var recorder = new WarcRecorder(tempFileWarc2);
+        try (var recorder = new WarcRecorder(tempFileWarc2, new Cookies());
             var db = new DomainStateDb(tempFileDb)
        ) {
            new CrawlerRetreiver(httpFetcher, new DomainProber(d -> true), specs, db, recorder).crawlDomain(new DomainLinks(),
@@ -522,7 +523,7 @@ class CrawlerRetreiverTest {

     @NotNull
     private DomainCrawlFrontier doCrawl(Path tempFileWarc1, CrawlerMain.CrawlSpecRecord specs) {
-        try (var recorder = new WarcRecorder(tempFileWarc1);
+        try (var recorder = new WarcRecorder(tempFileWarc1, new Cookies());
             var db = new DomainStateDb(tempFileDb)
        ) {
            var crawler = new CrawlerRetreiver(httpFetcher, new DomainProber(d -> true), specs, db, recorder);

@@ -56,7 +56,6 @@ dependencies {
     implementation libs.zstd
     implementation libs.jwarc
     implementation libs.crawlercommons
-    implementation libs.okhttp3
     implementation libs.jsoup
     implementation libs.opencsv
     implementation libs.fastutil

@@ -48,6 +48,8 @@ public class SimpleLinkScraper implements AutoCloseable {
     private final Duration readTimeout = Duration.ofSeconds(10);
     private final DomainLocks domainLocks = new DomainLocks();

+    private final static int MAX_SIZE = Integer.getInteger("crawler.maxFetchSize", 10 * 1024 * 1024);
+
     public SimpleLinkScraper(LiveCrawlDataSet dataSet,
                              DbDomainQueries domainQueries,
                              DomainBlacklist domainBlacklist) {
@@ -207,7 +209,7 @@ public class SimpleLinkScraper implements AutoCloseable {
         }

         byte[] body = getResponseData(response);
-        if (body.length > 1024 * 1024) {
+        if (body.length > MAX_SIZE) {
             return new FetchResult.Error(parsedUrl);
         }

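Integer.getInteger, used here and in WarcRecorder for crawler.maxFetchSize, reads a JVM system property (not an environment variable) and falls back to the default when the property is unset or unparsable. A minimal illustration:

class FetchSizeConfigDemo {
    // Reads the -Dcrawler.maxFetchSize=... system property, defaulting to 10 MiB
    private static final int MAX_SIZE = Integer.getInteger("crawler.maxFetchSize", 10 * 1024 * 1024);

    public static void main(String[] args) {
        // run with: java -Dcrawler.maxFetchSize=1048576 FetchSizeConfigDemo
        System.out.println("max fetch size: " + MAX_SIZE + " bytes");
    }
}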
@@ -44,6 +44,7 @@ dependencies {
     implementation libs.bundles.jetty
     implementation libs.opencsv
     implementation libs.trove
+    implementation libs.protobuf
     implementation libs.fastutil
     implementation libs.bundles.gson
     implementation libs.bundles.mariadb

@@ -6,10 +6,10 @@ import nu.marginalia.api.model.ApiSearchResult;
 import nu.marginalia.api.model.ApiSearchResultQueryDetails;
 import nu.marginalia.api.model.ApiSearchResults;
 import nu.marginalia.api.searchquery.QueryClient;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
 import nu.marginalia.api.searchquery.model.query.QueryParams;
 import nu.marginalia.api.searchquery.model.query.SearchSetIdentifier;
 import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
-import nu.marginalia.index.query.limit.QueryLimits;
 import nu.marginalia.model.idx.WordFlags;

 import java.util.ArrayList;
@@ -47,11 +47,12 @@ public class ApiSearchOperator {

         return new QueryParams(
                 query,
-                new QueryLimits(
-                        2,
-                        Math.min(100, count),
-                        150,
-                        8192),
+                RpcQueryLimits.newBuilder()
+                        .setResultsByDomain(2)
+                        .setResultsTotal(Math.min(100, count))
+                        .setTimeoutMs(150)
+                        .setFetchSize(8192)
+                        .build(),
                 searchSet.name());
     }

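The old QueryLimits constructor took four positional ints, which is easy to misorder; the protobuf-generated RpcQueryLimits builder names each limit at the call site instead. A sketch of the construction on its own, using only the setters visible in the hunk above (RpcQueryLimits is the project's generated class, so this is not independently runnable):

var limits = RpcQueryLimits.newBuilder()
        .setResultsByDomain(2)     // was QueryLimits' first positional argument
        .setResultsTotal(100)      // Math.min(100, count) in the actual code
        .setTimeoutMs(150)
        .setFetchSize(8192)
        .build();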
Some files were not shown because too many files have changed in this diff.