Mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git, synced 2025-10-06 07:32:38 +02:00
Compare commits: deploy-001...deploy-018 (392 commits)
.github/FUNDING.yml (vendored, 1 change)

```diff
@@ -1,5 +1,6 @@
 # These are supported funding model platforms
 
+polar: marginalia-search
 github: MarginaliaSearch
 patreon: marginalia_nu
 open_collective: # Replace with a single Open Collective username
```
.gitignore (vendored, 1 change)

```diff
@@ -7,3 +7,4 @@ build/
 lombok.config
 Dockerfile
 run
+jte-classes
```
ROADMAP.md (75 changes)

```diff
@@ -1,4 +1,4 @@
-# Roadmap 2024-2025
+# Roadmap 2025
 
 This is a roadmap with major features planned for Marginalia Search.
 
@@ -8,20 +8,10 @@ be implemented as well.
 Major goals:
 
 * Reach 1 billion pages indexed
-* Improve technical ability of indexing and search. Although this area has improved a bit, the
-search engine is still not very good at dealing with longer queries.
-
-## Proper Position Index (COMPLETED 2024-09)
-
-The search engine uses a fixed width bit mask to indicate word positions. It has the benefit
-of being very fast to evaluate and works well for what it is, but is inaccurate and has the
-drawback of making support for quoted search terms inaccurate and largely reliant on indexing
-word n-grams known beforehand. This limits the ability to interpret longer queries.
-
-The positions mask should be supplemented or replaced with a more accurate (e.g.) gamma coded positions
-list, as is the civilized way of doing this.
-
-Completed with PR [#99](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/99)
+* Improve technical ability of indexing and search. ~~Although this area has improved a bit, the
+search engine is still not very good at dealing with longer queries.~~ (As of PR [#129](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/129), this has improved significantly. There is still more work to be done )
 
 ## Hybridize crawler w/ Common Crawl data
 
@@ -37,16 +27,9 @@ Retaining the ability to independently crawl the web is still strongly desirable
 
 ## Safe Search
 
-The search engine has a bit of a problem showing spicy content mixed in with the results. It would be desirable
-to have a way to filter this out. It's likely something like a URL blacklist (e.g. [UT1](https://dsi.ut-capitole.fr/blacklists/index_en.php) )
+The search engine has a bit of a problem showing spicy content mixed in with the results. It would be desirable to have a way to filter this out. It's likely something like a URL blacklist (e.g. [UT1](https://dsi.ut-capitole.fr/blacklists/index_en.php) )
 combined with naive bayesian filter would go a long way, or something more sophisticated...?
 
-## Web Design Overhaul
-
-The design is kinda clunky and hard to maintain, and needlessly outdated-looking.
-
-In progress: PR [#127](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/127) -- demo available at https://test.marginalia.nu/
-
 ## Additional Language Support
 
 It would be desirable if the search engine supported more languages than English. This is partially about
@@ -55,15 +38,6 @@ associated with each language added, at least a models file or two, as well as s
 
 It would be very helpful to find a speaker of a large language other than English to help in the fine tuning.
 
-## Finalize RSS support (COMPLETED 2024-11)
-
-Marginalia has experimental RSS preview support for a few domains. This works well and
-it should be extended to all domains. It would also be interesting to offer search of the
-RSS data itself, or use the RSS set to feed a special live index that updates faster than the
-main dataset.
-
-Completed with PR [#122](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/122) and PR [#125](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/125)
-
 ## Support for binary formats like PDF
 
 The crawler needs to be modified to retain them, and the conversion logic needs to parse them.
@@ -80,5 +54,42 @@ This looks like a good idea that wouldn't just help clean up the search filters
 website, but might be cheap enough we might go as far as to offer a number of ad-hoc custom search
 filter for any API consumer.
 
-I've talked to the stract dev and he does not think it's a good idea to mimic their optics language,
-which is quite ad-hoc, but instead to work together to find some new common description language for this.
+I've talked to the stract dev and he does not think it's a good idea to mimic their optics language, which is quite ad-hoc, but instead to work together to find some new common description language for this.
+
+## Show favicons next to search results
+
+This is expected from search engines. Basic proof of concept sketch of fetching this data has been done, but the feature is some way from being reality.
+
+## Specialized crawler for github
+
+One of the search engine's biggest limitations right now is that it does not index github at all. A specialized crawler that fetches at least the readme.md would go a long way toward providing search capabilities in this domain.
+
+# Completed
+
+## Web Design Overhaul (COMPLETED 2025-01)
+
+The design is kinda clunky and hard to maintain, and needlessly outdated-looking.
+
+PR [#127](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/127)
+
+## Finalize RSS support (COMPLETED 2024-11)
+
+Marginalia has experimental RSS preview support for a few domains. This works well and
+it should be extended to all domains. It would also be interesting to offer search of the
+RSS data itself, or use the RSS set to feed a special live index that updates faster than the
+main dataset.
+
+Completed with PR [#122](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/122) and PR [#125](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/125)
+
+## Proper Position Index (COMPLETED 2024-09)
+
+The search engine uses a fixed width bit mask to indicate word positions. It has the benefit
+of being very fast to evaluate and works well for what it is, but is inaccurate and has the
+drawback of making support for quoted search terms inaccurate and largely reliant on indexing
+word n-grams known beforehand. This limits the ability to interpret longer queries.
+
+The positions mask should be supplemented or replaced with a more accurate (e.g.) gamma coded positions
+list, as is the civilized way of doing this.
+
+Completed with PR [#99](https://github.com/MarginaliaSearch/MarginaliaSearch/pull/99)
```
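The "gamma coded positions list" the completed Position Index item refers to is Elias gamma coding of the deltas between successive word positions. A minimal sketch of the encoding side follows; it is illustrative only, not Marginalia's actual implementation (see PR #99 for that), and it assumes a strictly ascending positions list whose first position is at least 1.

```java
import java.util.BitSet;

/** Sketch of a gamma-coded positions list: each delta is stored as
 *  n-1 zero bits followed by the n-bit binary value, MSB first. */
class GammaPositionsSketch {

    /** Append value (must be >= 1) to the bit stream in Elias gamma code. */
    static int writeGamma(BitSet bits, int idx, int value) {
        int n = 32 - Integer.numberOfLeadingZeros(value); // bit length of value
        idx += n - 1;                                     // n-1 leading zeros
        for (int i = n - 1; i >= 0; i--) {
            if ((value & (1 << i)) != 0) bits.set(idx);
            idx++;
        }
        return idx;
    }

    /** Encode a strictly ascending positions list as gamma-coded deltas. */
    static BitSet encode(int[] positions) {
        BitSet bits = new BitSet();
        int idx = 0, prev = 0;
        for (int pos : positions) {
            idx = writeGamma(bits, idx, pos - prev); // deltas are >= 1
            prev = pos;
        }
        return bits;
    }
}
```

Small deltas dominate in real texts, so this costs far fewer bits per position than a fixed-width mask while remaining exact.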
```diff
@@ -5,7 +5,7 @@ plugins {
 
     // This is a workaround for a bug in the Jib plugin that causes it to stall randomly
     // https://github.com/GoogleContainerTools/jib/issues/3347
-    id 'com.google.cloud.tools.jib' version '3.4.3' apply(false)
+    id 'com.google.cloud.tools.jib' version '3.4.5' apply(false)
 }
 
 group 'marginalia'
@@ -43,11 +43,11 @@ subprojects.forEach {it ->
 }
 
 ext {
-    jvmVersion=23
-    dockerImageBase='container-registry.oracle.com/graalvm/jdk:23'
+    jvmVersion = 24
+    dockerImageBase='container-registry.oracle.com/graalvm/jdk:24'
     dockerImageTag='latest'
     dockerImageRegistry='marginalia'
-    jibVersion = '3.4.3'
+    jibVersion = '3.4.5'
 }
 
 idea {
```
```diff
@@ -24,58 +24,4 @@ public class LanguageModels {
         this.fasttextLanguageModel = fasttextLanguageModel;
         this.segments = segments;
     }
-
-    public static LanguageModelsBuilder builder() {
-        return new LanguageModelsBuilder();
-    }
-
-    public static class LanguageModelsBuilder {
-        private Path termFrequencies;
-        private Path openNLPSentenceDetectionData;
-        private Path posRules;
-        private Path posDict;
-        private Path fasttextLanguageModel;
-        private Path segments;
-
-        LanguageModelsBuilder() {
-        }
-
-        public LanguageModelsBuilder termFrequencies(Path termFrequencies) {
-            this.termFrequencies = termFrequencies;
-            return this;
-        }
-
-        public LanguageModelsBuilder openNLPSentenceDetectionData(Path openNLPSentenceDetectionData) {
-            this.openNLPSentenceDetectionData = openNLPSentenceDetectionData;
-            return this;
-        }
-
-        public LanguageModelsBuilder posRules(Path posRules) {
-            this.posRules = posRules;
-            return this;
-        }
-
-        public LanguageModelsBuilder posDict(Path posDict) {
-            this.posDict = posDict;
-            return this;
-        }
-
-        public LanguageModelsBuilder fasttextLanguageModel(Path fasttextLanguageModel) {
-            this.fasttextLanguageModel = fasttextLanguageModel;
-            return this;
-        }
-
-        public LanguageModelsBuilder segments(Path segments) {
-            this.segments = segments;
-            return this;
-        }
-
-        public LanguageModels build() {
-            return new LanguageModels(this.termFrequencies, this.openNLPSentenceDetectionData, this.posRules, this.posDict, this.fasttextLanguageModel, this.segments);
-        }
-
-        public String toString() {
-            return "LanguageModels.LanguageModelsBuilder(termFrequencies=" + this.termFrequencies + ", openNLPSentenceDetectionData=" + this.openNLPSentenceDetectionData + ", posRules=" + this.posRules + ", posDict=" + this.posDict + ", fasttextLanguageModel=" + this.fasttextLanguageModel + ", segments=" + this.segments + ")";
-        }
-    }
 }
```
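With the Lombok-style builder removed, callers presumably construct LanguageModels directly. A hypothetical call site, using the parameter order implied by the deleted build() method; the file names are placeholders, not the repository's actual model paths:

```java
import java.nio.file.Path;

// Hypothetical wiring code; paths are invented for illustration.
class LanguageModelsWiring {
    static LanguageModels defaultModels(Path dir) {
        return new LanguageModels(
                dir.resolve("tfreq.bin"),             // termFrequencies
                dir.resolve("opennlp-sentence.bin"),  // openNLPSentenceDetectionData
                dir.resolve("pos-rules.txt"),         // posRules
                dir.resolve("pos-dict.bin"),          // posDict
                dir.resolve("fasttext-lid.bin"),      // fasttextLanguageModel
                dir.resolve("segments.bin"));         // segments
    }
}
```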
```diff
@@ -8,18 +8,23 @@ import com.google.inject.Inject;
 import com.google.inject.Singleton;
 import com.zaxxer.hikari.HikariDataSource;
 import nu.marginalia.model.EdgeDomain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.sql.SQLException;
-import java.util.NoSuchElementException;
-import java.util.Optional;
-import java.util.OptionalInt;
+import java.util.*;
 import java.util.concurrent.ExecutionException;
 
 @Singleton
 public class DbDomainQueries {
     private final HikariDataSource dataSource;
 
+    private static final Logger logger = LoggerFactory.getLogger(DbDomainQueries.class);
+
     private final Cache<EdgeDomain, Integer> domainIdCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+    private final Cache<EdgeDomain, DomainIdWithNode> domainWithNodeCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+    private final Cache<Integer, EdgeDomain> domainNameCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
+    private final Cache<String, List<DomainWithNode>> siblingsCache = CacheBuilder.newBuilder().maximumSize(10_000).build();
 
     @Inject
     public DbDomainQueries(HikariDataSource dataSource)
@@ -28,25 +33,58 @@
     }
 
-    public Integer getDomainId(EdgeDomain domain) {
-        try (var connection = dataSource.getConnection()) {
+    public Integer getDomainId(EdgeDomain domain) throws NoSuchElementException {
+        try {
             return domainIdCache.get(domain, () -> {
-                try (var stmt = connection.prepareStatement("SELECT ID FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+                try (var connection = dataSource.getConnection();
+                     var stmt = connection.prepareStatement("SELECT ID FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+
                     stmt.setString(1, domain.toString());
                     var rsp = stmt.executeQuery();
                     if (rsp.next()) {
                         return rsp.getInt(1);
                     }
                 }
+                catch (SQLException ex) {
+                    throw new RuntimeException(ex);
+                }
 
                 throw new NoSuchElementException();
             });
         }
+        catch (UncheckedExecutionException ex) {
+            throw new NoSuchElementException();
+        }
         catch (ExecutionException ex) {
             throw new RuntimeException(ex.getCause());
         }
-        catch (SQLException ex) {
-            throw new RuntimeException(ex);
-        }
+    }
+
+    public DomainIdWithNode getDomainIdWithNode(EdgeDomain domain) throws NoSuchElementException {
+        try {
+            return domainWithNodeCache.get(domain, () -> {
+                try (var connection = dataSource.getConnection();
+                     var stmt = connection.prepareStatement("SELECT ID, NODE_AFFINITY FROM EC_DOMAIN WHERE DOMAIN_NAME=?")) {
+
+                    stmt.setString(1, domain.toString());
+                    var rsp = stmt.executeQuery();
+                    if (rsp.next()) {
+                        return new DomainIdWithNode(rsp.getInt(1), rsp.getInt(2));
+                    }
+                }
+                catch (SQLException ex) {
+                    throw new RuntimeException(ex);
+                }
+
+                throw new NoSuchElementException();
+            });
+        }
+        catch (UncheckedExecutionException ex) {
+            throw new NoSuchElementException();
+        }
+        catch (ExecutionException ex) {
+            throw new RuntimeException(ex.getCause());
+        }
     }
@@ -80,22 +118,62 @@ public class DbDomainQueries {
     }
 
     public Optional<EdgeDomain> getDomain(int id) {
-        try (var connection = dataSource.getConnection()) {
+
+        EdgeDomain existing = domainNameCache.getIfPresent(id);
+        if (existing != null) {
+            return Optional.of(existing);
+        }
+
+        try (var connection = dataSource.getConnection()) {
             try (var stmt = connection.prepareStatement("SELECT DOMAIN_NAME FROM EC_DOMAIN WHERE ID=?")) {
                 stmt.setInt(1, id);
                 var rsp = stmt.executeQuery();
                 if (rsp.next()) {
-                    return Optional.of(new EdgeDomain(rsp.getString(1)));
+                    var val = new EdgeDomain(rsp.getString(1));
+                    domainNameCache.put(id, val);
+                    return Optional.of(val);
                 }
                 return Optional.empty();
             }
         }
-        catch (UncheckedExecutionException ex) {
-            throw new RuntimeException(ex.getCause());
-        }
         catch (SQLException ex) {
             throw new RuntimeException(ex);
         }
     }
+
+    public List<DomainWithNode> otherSubdomains(EdgeDomain domain, int cnt) throws ExecutionException {
+        String topDomain = domain.topDomain;
+
+        return siblingsCache.get(topDomain, () -> {
+            List<DomainWithNode> ret = new ArrayList<>();
+
+            try (var conn = dataSource.getConnection();
+                 var stmt = conn.prepareStatement("SELECT DOMAIN_NAME, NODE_AFFINITY FROM EC_DOMAIN WHERE DOMAIN_TOP = ? LIMIT ?")) {
+                stmt.setString(1, topDomain);
+                stmt.setInt(2, cnt);
+
+                var rs = stmt.executeQuery();
+                while (rs.next()) {
+                    var sibling = new EdgeDomain(rs.getString(1));
+
+                    if (sibling.equals(domain))
+                        continue;
+
+                    ret.add(new DomainWithNode(sibling, rs.getInt(2)));
+                }
+            } catch (SQLException e) {
+                logger.error("Failed to get domain neighbors");
+            }
+            return ret;
+        });
+    }
+
+    public record DomainWithNode (EdgeDomain domain, int nodeAffinity) {
+        public boolean isIndexed() {
+            return nodeAffinity > 0;
+        }
+    }
+
+    public record DomainIdWithNode (int domainId, int nodeAffinity) { }
 }
```
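A note on the exception plumbing above: Guava's Cache.get wraps a checked exception from the loader in ExecutionException and an unchecked one in UncheckedExecutionException. Since the loaders throw NoSuchElementException for missing rows, the new catch clause maps that back to NoSuchElementException for the caller. A hypothetical caller, sketched under the assumption that the instance is obtained via Guice as in the rest of the codebase:

```java
import java.util.NoSuchElementException;

// Hypothetical caller; the first lookup hits EC_DOMAIN and populates the
// Guava cache, subsequent lookups for the same domain are memory-only.
class DomainIdLookupExample {
    static void printDomainId(DbDomainQueries queries) {
        try {
            int id = queries.getDomainId(new EdgeDomain("search.marginalia.nu"));
            System.out.println("domain id = " + id);
        } catch (NoSuchElementException e) {
            System.out.println("domain not present in EC_DOMAIN");
        }
    }
}
```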
```diff
@@ -1,118 +0,0 @@
-package nu.marginalia.db;
-
-import com.zaxxer.hikari.HikariDataSource;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.OptionalInt;
-
-/** Class used in exporting data. This is intended to be used for a brief time
- * and then discarded, not kept around as a service.
- */
-public class DbDomainStatsExportMultitool implements AutoCloseable {
-    private final Connection connection;
-    private final int nodeId;
-    private final PreparedStatement knownUrlsQuery;
-    private final PreparedStatement visitedUrlsQuery;
-    private final PreparedStatement goodUrlsQuery;
-    private final PreparedStatement domainNameToId;
-
-    private final PreparedStatement allDomainsQuery;
-    private final PreparedStatement crawlQueueDomains;
-    private final PreparedStatement indexedDomainsQuery;
-
-    public DbDomainStatsExportMultitool(HikariDataSource dataSource, int nodeId) throws SQLException {
-        this.connection = dataSource.getConnection();
-        this.nodeId = nodeId;
-
-        knownUrlsQuery = connection.prepareStatement("""
-                SELECT KNOWN_URLS
-                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
-                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
-                WHERE DOMAIN_NAME=?
-                """);
-        visitedUrlsQuery = connection.prepareStatement("""
-                SELECT VISITED_URLS
-                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
-                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
-                WHERE DOMAIN_NAME=?
-                """);
-        goodUrlsQuery = connection.prepareStatement("""
-                SELECT GOOD_URLS
-                FROM EC_DOMAIN INNER JOIN DOMAIN_METADATA
-                ON EC_DOMAIN.ID=DOMAIN_METADATA.ID
-                WHERE DOMAIN_NAME=?
-                """);
-        domainNameToId = connection.prepareStatement("""
-                SELECT ID
-                FROM EC_DOMAIN
-                WHERE DOMAIN_NAME=?
-                """);
-        allDomainsQuery = connection.prepareStatement("""
-                SELECT DOMAIN_NAME
-                FROM EC_DOMAIN
-                """);
-        crawlQueueDomains = connection.prepareStatement("""
-                SELECT DOMAIN_NAME
-                FROM CRAWL_QUEUE
-                """);
-        indexedDomainsQuery = connection.prepareStatement("""
-                SELECT DOMAIN_NAME
-                FROM EC_DOMAIN
-                WHERE INDEXED > 0
-                """);
-    }
-
-    public OptionalInt getVisitedUrls(String domainName) throws SQLException {
-        return executeNameToIntQuery(domainName, visitedUrlsQuery);
-    }
-
-    public OptionalInt getDomainId(String domainName) throws SQLException {
-        return executeNameToIntQuery(domainName, domainNameToId);
-    }
-
-    public List<String> getCrawlQueueDomains() throws SQLException {
-        return executeListQuery(crawlQueueDomains, 100);
-    }
-    public List<String> getAllIndexedDomains() throws SQLException {
-        return executeListQuery(indexedDomainsQuery, 100_000);
-    }
-
-    private OptionalInt executeNameToIntQuery(String domainName, PreparedStatement statement)
-            throws SQLException {
-        statement.setString(1, domainName);
-        var rs = statement.executeQuery();
-
-        if (rs.next()) {
-            return OptionalInt.of(rs.getInt(1));
-        }
-
-        return OptionalInt.empty();
-    }
-
-    private List<String> executeListQuery(PreparedStatement statement, int sizeHint) throws SQLException {
-        List<String> ret = new ArrayList<>(sizeHint);
-
-        var rs = statement.executeQuery();
-
-        while (rs.next()) {
-            ret.add(rs.getString(1));
-        }
-
-        return ret;
-    }
-
-    @Override
-    public void close() throws SQLException {
-        knownUrlsQuery.close();
-        goodUrlsQuery.close();
-        visitedUrlsQuery.close();
-        allDomainsQuery.close();
-        crawlQueueDomains.close();
-        domainNameToId.close();
-        connection.close();
-    }
-}
```
```diff
@@ -0,0 +1,24 @@
+package nu.marginalia.model;
+
+public enum DocumentFormat {
+    PLAIN(0, 1, "text"),
+    PDF(0, 1, "pdf"),
+    UNKNOWN(0, 1, "???"),
+    HTML123(0, 1, "html"),
+    HTML4(-0.1, 1.05, "html"),
+    XHTML(-0.1, 1.05, "html"),
+    HTML5(0.5, 1.1, "html");
+
+    /** Used to tune quality score */
+    public final double offset;
+    /** Used to tune quality score */
+    public final double scale;
+    public final String shortFormat;
+
+    DocumentFormat(double offset, double scale, String shortFormat) {
+        this.offset = offset;
+        this.scale = scale;
+        this.shortFormat = shortFormat;
+    }
+
+}
```
```diff
@@ -14,7 +14,7 @@ public class EdgeDomain implements Serializable {
     @Nonnull
     public final String topDomain;
 
-    public EdgeDomain(String host) {
+    public EdgeDomain(@Nonnull String host) {
         Objects.requireNonNull(host, "domain name must not be null");
 
         host = host.toLowerCase();
@@ -61,6 +61,10 @@ public class EdgeDomain implements Serializable {
         this.topDomain = topDomain;
     }
 
+    public static String getTopDomain(String host) {
+        return new EdgeDomain(host).topDomain;
+    }
+
     private boolean looksLikeGovTld(String host) {
         if (host.length() < 8)
             return false;
@@ -116,24 +120,6 @@ public class EdgeDomain implements Serializable {
         return topDomain.substring(0, cutPoint).toLowerCase();
     }
 
-    public String getLongDomainKey() {
-        StringBuilder ret = new StringBuilder();
-
-        int cutPoint = topDomain.indexOf('.');
-        if (cutPoint < 0) {
-            ret.append(topDomain);
-        } else {
-            ret.append(topDomain, 0, cutPoint);
-        }
-
-        if (!subDomain.isEmpty() && !"www".equals(subDomain)) {
-            ret.append(":");
-            ret.append(subDomain);
-        }
-
-        return ret.toString().toLowerCase();
-    }
-
     /** If possible, try to provide an alias domain,
      * i.e. a domain name that is very likely to link to this one
      * */
```
```diff
@@ -1,16 +1,14 @@
 package nu.marginalia.model;
 
 import nu.marginalia.util.QueryParams;
+import org.apache.commons.lang3.StringUtils;
 
 import javax.annotation.Nullable;
 import java.io.Serializable;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
+import java.net.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.regex.Pattern;
 
 public class EdgeUrl implements Serializable {
     public final String proto;
@@ -33,7 +31,7 @@ public class EdgeUrl implements Serializable {
 
     private static URI parseURI(String url) throws URISyntaxException {
         try {
-            return new URI(urlencodeFixer(url));
+            return EdgeUriFactory.parseURILenient(url);
         } catch (URISyntaxException ex) {
             throw new URISyntaxException("Failed to parse URI '" + url + "'", ex.getMessage());
         }
@@ -51,58 +49,6 @@ public class EdgeUrl implements Serializable {
         }
     }
 
-    private static Pattern badCharPattern = Pattern.compile("[ \t\n\"<>\\[\\]()',|]");
-
-    /* Java's URI parser is a bit too strict in throwing exceptions when there's an error.
-
-       Here on the Internet, standards are like the picture on the box of the frozen pizza,
-       and what you get is more like what's on the inside, we try to patch things instead,
-       just give it a best-effort attempt att cleaning out broken or unnecessary constructions
-       like bad or missing URLEncoding
-    */
-    public static String urlencodeFixer(String url) throws URISyntaxException {
-        var s = new StringBuilder();
-        String goodChars = "&.?:/-;+$#";
-        String hexChars = "0123456789abcdefABCDEF";
-
-        int pathIdx = findPathIdx(url);
-        if (pathIdx < 0) { // url looks like http://marginalia.nu
-            return url + "/";
-        }
-        s.append(url, 0, pathIdx);
-
-        // We don't want the fragment, and multiple fragments breaks the Java URIParser for some reason
-        int end = url.indexOf("#");
-        if (end < 0) end = url.length();
-
-        for (int i = pathIdx; i < end; i++) {
-            int c = url.charAt(i);
-
-            if (goodChars.indexOf(c) >= 0 || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9')) {
-                s.appendCodePoint(c);
-            } else if (c == '%' && i + 2 < end) {
-                int cn = url.charAt(i + 1);
-                int cnn = url.charAt(i + 2);
-                if (hexChars.indexOf(cn) >= 0 && hexChars.indexOf(cnn) >= 0) {
-                    s.appendCodePoint(c);
-                } else {
-                    s.append("%25");
-                }
-            } else {
-                s.append(String.format("%%%02X", c));
-            }
-        }
-
-        return s.toString();
-    }
-
-    private static int findPathIdx(String url) throws URISyntaxException {
-        int colonIdx = url.indexOf(':');
-        if (colonIdx < 0 || colonIdx + 2 >= url.length()) {
-            throw new URISyntaxException(url, "Lacking protocol");
-        }
-        return url.indexOf('/', colonIdx + 2);
-    }
-
     public EdgeUrl(URI URI) {
         try {
@@ -166,11 +112,32 @@ public class EdgeUrl implements Serializable {
             sb.append(port);
         }
 
+        EdgeUriFactory.urlencodePath(sb, path);
+
+        if (param != null) {
+            EdgeUriFactory.urlencodeQuery(sb, param);
+        }
+
+        return sb.toString();
+    }
+
+    public String toDisplayString() {
+        StringBuilder sb = new StringBuilder(256);
+
+        sb.append(proto);
+        sb.append("://");
+        sb.append(domain);
+
+        if (port != null) {
+            sb.append(':');
+            sb.append(port);
+        }
+
         sb.append(path);
 
         if (param != null) {
-            sb.append('?');
-            sb.append(param);
+            sb.append('?').append(param);
         }
 
         return sb.toString();
@@ -247,3 +214,244 @@
     }
 
 }
+
+class EdgeUriFactory {
+    public static URI parseURILenient(String url) throws URISyntaxException {
+
+        if (shouldOmitUrlencodeRepair(url)) {
+            try {
+                return new URI(url);
+            }
+            catch (URISyntaxException ex) {
+                // ignore and run the lenient parser
+            }
+        }
+
+        var s = new StringBuilder(url.length()+8);
+
+        int pathIdx = findPathIdx(url);
+        if (pathIdx < 0) { // url looks like http://marginalia.nu
+            return new URI(url + "/");
+        }
+        s.append(url, 0, pathIdx);
+
+        // We don't want the fragment, and multiple fragments breaks the Java URIParser for some reason
+        int end = url.indexOf("#");
+        if (end < 0) end = url.length();
+
+        int queryIdx = url.indexOf('?');
+        if (queryIdx < 0) queryIdx = end;
+
+        urlencodePath(s, url.substring(pathIdx, queryIdx));
+        if (queryIdx < end) {
+            urlencodeQuery(s, url.substring(queryIdx + 1, end));
+        }
+        return new URI(s.toString());
+    }
+
+    /** Break apart the path element of an URI into its components, and then
+     * urlencode any component that needs it, and recombine it into a single
+     * path element again.
+     */
+    public static void urlencodePath(StringBuilder sb, String path) {
+        if (path == null || path.isEmpty()) {
+            return;
+        }
+
+        String[] pathParts = StringUtils.split(path, '/');
+        if (pathParts.length == 0) {
+            sb.append('/');
+            return;
+        }
+
+        boolean shouldUrlEncode = false;
+        for (String pathPart : pathParts) {
+            if (pathPart.isEmpty()) continue;
+
+            if (needsUrlEncode(pathPart)) {
+                shouldUrlEncode = true;
+                break;
+            }
+        }
+
+        for (String pathPart : pathParts) {
+            if (pathPart.isEmpty()) continue;
+
+            if (shouldUrlEncode) {
+                sb.append('/');
+                sb.append(URLEncoder.encode(pathPart, StandardCharsets.UTF_8).replace("+", "%20"));
+            } else {
+                sb.append('/');
+                sb.append(pathPart);
+            }
+        }
+
+        if (path.endsWith("/")) {
+            sb.append('/');
+        }
+
+    }
+
+    /** Break apart the query element of a URI into its components, and then
+     * urlencode any component that needs it, and recombine it into a single
+     * query element again.
+     */
+    public static void urlencodeQuery(StringBuilder sb, String param) {
+        if (param == null || param.isEmpty()) {
+            return;
+        }
+
+        String[] queryParts = StringUtils.split(param, '&');
+
+        boolean shouldUrlEncode = false;
+        for (String queryPart : queryParts) {
+            if (queryPart.isEmpty()) continue;
+
+            if (needsUrlEncode(queryPart)) {
+                shouldUrlEncode = true;
+                break;
+            }
+        }
+
+        boolean first = true;
+        for (String queryPart : queryParts) {
+            if (queryPart.isEmpty()) continue;
+
+            if (first) {
+                sb.append('?');
+                first = false;
+            } else {
+                sb.append('&');
+            }
+
+            if (shouldUrlEncode) {
+                int idx = queryPart.indexOf('=');
+                if (idx < 0) {
+                    sb.append(URLEncoder.encode(queryPart, StandardCharsets.UTF_8));
+                } else {
+                    sb.append(URLEncoder.encode(queryPart.substring(0, idx), StandardCharsets.UTF_8));
+                    sb.append('=');
+                    sb.append(URLEncoder.encode(queryPart.substring(idx + 1), StandardCharsets.UTF_8));
+                }
+            } else {
+                sb.append(queryPart);
+            }
+        }
+    }
+
+    /** Test if the url element needs URL encoding.
+     * <p></p>
+     * Note we may have been given an already encoded path element,
+     * so we include % and + in the list of good characters
+     */
+    static boolean needsUrlEncode(String urlElement) {
+        for (int i = 0; i < urlElement.length(); i++) {
+            char c = urlElement.charAt(i);
+
+            if (isUrlSafe(c)) continue;
+            if ("+".indexOf(c) >= 0) continue;
+            if (c == '%' && i + 2 < urlElement.length()) {
+                char c1 = urlElement.charAt(i + 1);
+                char c2 = urlElement.charAt(i + 2);
+                if (isHexDigit(c1) && isHexDigit(c2)) {
+                    i += 2;
+                    continue;
+                }
+            }
+
+            return true;
+        }
+
+        return false;
+    }
+
+    static boolean isUrlSafe(int c) {
+        if (c >= 'a' && c <= 'z') return true;
+        if (c >= 'A' && c <= 'Z') return true;
+        if (c >= '0' && c <= '9') return true;
+        if (c == '-' || c == '_' || c == '.' || c == '~') return true;
+
+        return false;
+    }
+
+    /** Test if the URL is a valid URL that does not need to be
+     * urlencoded.
+     * <p></p>
+     * This is a very simple heuristic test that does not guarantee
+     * that the URL is valid, but it will identify cases where we
+     * are fairly certain that the URL does not need encoding,
+     * so we can skip a bunch of allocations and string operations
+     * that would otherwise be needed to fix the URL.
+     */
+    static boolean shouldOmitUrlencodeRepair(String url) {
+        int idx = 0;
+        final int len = url.length();
+
+        // Validate the scheme
+        while (idx < len - 2) {
+            char c = url.charAt(idx++);
+            if (c == ':') break;
+            if (!isAsciiAlphabetic(c)) return false;
+        }
+        if (url.charAt(idx++) != '/') return false;
+        if (url.charAt(idx++) != '/') return false;
+
+        // Validate the authority
+        while (idx < len) {
+            char c = url.charAt(idx++);
+            if (c == '/') break;
+            if (c == ':') continue;
+            if (c == '@') continue;
+            if (!isUrlSafe(c)) return false;
+        }
+
+        // Validate the path
+        if (idx >= len) return true;
+
+        while (idx < len) {
+            char c = url.charAt(idx++);
+            if (c == '?') break;
+            if (c == '/') continue;
+            if (c == '#') return true;
+            if (!isUrlSafe(c)) return false;
+        }
+
+        if (idx >= len) return true;
+
+        // Validate the query
+        while (idx < len) {
+            char c = url.charAt(idx++);
+            if (c == '&') continue;
+            if (c == '=') continue;
+            if (c == '#') return true;
+            if (!isUrlSafe(c)) return false;
+        }
+
+        return true;
+    }
+
+    private static boolean isAsciiAlphabetic(int c) {
+        return (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F');
+    }
+
+    private static boolean isHexDigit(int c) {
+        return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F');
+    }
+
+    /** Find the index of the path element in a URL.
+     * <p></p>
+     * The path element starts after the scheme and authority part of the URL,
+     * which is everything up to and including the first slash after the colon.
+     */
+    private static int findPathIdx(String url) throws URISyntaxException {
+        int colonIdx = url.indexOf(':');
+        if (colonIdx < 0 || colonIdx + 3 >= url.length()) {
+            throw new URISyntaxException(url, "Lacking scheme");
+        }
+        return url.indexOf('/', colonIdx + 3);
+    }
+
+}
```
```diff
@@ -28,6 +28,8 @@ public enum HtmlFeature {
 
     GA_SPAM("special:gaspam"),
 
+    PDF("format:pdf"),
+
     /** For fingerprinting and ranking */
     OPENGRAPH("special:opengraph"),
     OPENGRAPH_IMAGE("special:opengraph:image"),
```
```diff
@@ -1,22 +0,0 @@
-package nu.marginalia.model.html;
-
-// This class really doesn't belong anywhere, but will squat here for now
-public enum HtmlStandard {
-    PLAIN(0, 1),
-    UNKNOWN(0, 1),
-    HTML123(0, 1),
-    HTML4(-0.1, 1.05),
-    XHTML(-0.1, 1.05),
-    HTML5(0.5, 1.1);
-
-    /** Used to tune quality score */
-    public final double offset;
-    /** Used to tune quality score */
-    public final double scale;
-
-    HtmlStandard(double offset, double scale) {
-        this.offset = offset;
-        this.scale = scale;
-    }
-
-}
```
```diff
@@ -9,7 +9,7 @@ public enum DocumentFlags {
     GeneratorForum,
     GeneratorWiki,
     Sideloaded,
-    Unused7,
+    PdfFile,
     Unused8,
     ;
 
```
```diff
@@ -83,6 +83,11 @@ public class QueryParams {
         if (path.endsWith("StoryView.py")) { // folklore.org is neat
             return param.startsWith("project=") || param.startsWith("story=");
         }
+
+        // www.perseus.tufts.edu:
+        if (param.startsWith("collection=")) return true;
+        if (param.startsWith("doc=")) return true;
+
         return false;
     }
 }
```
```diff
@@ -1,6 +1,6 @@
 package nu.marginalia.model;
 
-import nu.marginalia.model.EdgeUrl;
+import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
 import java.net.URISyntaxException;
@@ -21,25 +21,70 @@ class EdgeUrlTest {
             new EdgeUrl("https://memex.marginalia.nu/#here")
         );
     }
 
     @Test
-    public void testParam() throws URISyntaxException {
-        System.out.println(new EdgeUrl("https://memex.marginalia.nu/index.php?id=1").toString());
-        System.out.println(new EdgeUrl("https://memex.marginalia.nu/showthread.php?id=1&count=5&tracking=123").toString());
-    }
-    @Test
-    void urlencodeFixer() throws URISyntaxException {
-        System.out.println(EdgeUrl.urlencodeFixer("https://www.example.com/#heredoc"));
-        System.out.println(EdgeUrl.urlencodeFixer("https://www.example.com/%-sign"));
-        System.out.println(EdgeUrl.urlencodeFixer("https://www.example.com/%22-sign"));
-        System.out.println(EdgeUrl.urlencodeFixer("https://www.example.com/\n \"huh\""));
+    void testUriFromString() throws URISyntaxException {
+        // We test these URLs several times as we perform URLEncode-fixing both when parsing the URL and when
+        // converting it back to a string, we want to ensure there is no changes along the way.
+
+        Assertions.assertEquals("/", EdgeUriFactory.parseURILenient("https://www.example.com/").getPath());
+        Assertions.assertEquals("https://www.example.com/", EdgeUriFactory.parseURILenient("https://www.example.com/").toString());
+        Assertions.assertEquals("https://www.example.com/", new EdgeUrl("https://www.example.com/").toString());
+
+        Assertions.assertEquals("/", EdgeUriFactory.parseURILenient("https://www.example.com/#heredoc").getPath());
+        Assertions.assertEquals("https://www.example.com/", EdgeUriFactory.parseURILenient("https://www.example.com/#heredoc").toString());
+        Assertions.assertEquals("https://www.example.com/", new EdgeUrl("https://www.example.com/#heredoc").toString());
+
+        Assertions.assertEquals("/trailingslash/", EdgeUriFactory.parseURILenient("https://www.example.com/trailingslash/").getPath());
+        Assertions.assertEquals("https://www.example.com/trailingslash/", EdgeUriFactory.parseURILenient("https://www.example.com/trailingslash/").toString());
+        Assertions.assertEquals("https://www.example.com/trailingslash/", new EdgeUrl("https://www.example.com/trailingslash/").toString());
+
+        Assertions.assertEquals("/%-sign", EdgeUriFactory.parseURILenient("https://www.example.com/%-sign").getPath());
+        Assertions.assertEquals("https://www.example.com/%25-sign", EdgeUriFactory.parseURILenient("https://www.example.com/%-sign").toString());
+        Assertions.assertEquals("https://www.example.com/%25-sign", new EdgeUrl("https://www.example.com/%-sign").toString());
+
+        Assertions.assertEquals("/%-sign/\"-sign", EdgeUriFactory.parseURILenient("https://www.example.com//%-sign/\"-sign").getPath());
+        Assertions.assertEquals("https://www.example.com/%25-sign/%22-sign", EdgeUriFactory.parseURILenient("https://www.example.com//%-sign/\"-sign").toString());
+        Assertions.assertEquals("https://www.example.com/%25-sign/%22-sign", new EdgeUrl("https://www.example.com//%-sign/\"-sign").toString());
+
+        Assertions.assertEquals("/\"-sign", EdgeUriFactory.parseURILenient("https://www.example.com/%22-sign").getPath());
+        Assertions.assertEquals("https://www.example.com/%22-sign", EdgeUriFactory.parseURILenient("https://www.example.com/%22-sign").toString());
+        Assertions.assertEquals("https://www.example.com/%22-sign", new EdgeUrl("https://www.example.com/%22-sign").toString());
+
+        Assertions.assertEquals("/\n \"huh\"", EdgeUriFactory.parseURILenient("https://www.example.com/\n \"huh\"").getPath());
+        Assertions.assertEquals("https://www.example.com/%0A%20%22huh%22", EdgeUriFactory.parseURILenient("https://www.example.com/\n \"huh\"").toString());
+        Assertions.assertEquals("https://www.example.com/%0A%20%22huh%22", new EdgeUrl("https://www.example.com/\n \"huh\"").toString());
+
+        Assertions.assertEquals("/wiki/Sámi", EdgeUriFactory.parseURILenient("https://en.wikipedia.org/wiki/Sámi").getPath());
+        Assertions.assertEquals("https://en.wikipedia.org/wiki/S%C3%A1mi", EdgeUriFactory.parseURILenient("https://en.wikipedia.org/wiki/Sámi").toString());
+        Assertions.assertEquals("https://en.wikipedia.org/wiki/S%C3%A1mi", new EdgeUrl("https://en.wikipedia.org/wiki/Sámi").toString());
+
+        Assertions.assertEquals("https://www.prijatelji-zivotinja.hr/index.en.php?id=2301k", new EdgeUrl("https://www.prijatelji-zivotinja.hr/index.en.php?id=2301k").toString());
     }
 
     @Test
     void testParms() throws URISyntaxException {
-        System.out.println(new EdgeUrl("https://search.marginalia.nu/?id=123"));
-        System.out.println(new EdgeUrl("https://search.marginalia.nu/?t=123"));
-        System.out.println(new EdgeUrl("https://search.marginalia.nu/?v=123"));
-        System.out.println(new EdgeUrl("https://search.marginalia.nu/?m=123"));
-        System.out.println(new EdgeUrl("https://search.marginalia.nu/?follow=123"));
+        Assertions.assertEquals("id=123", new EdgeUrl("https://search.marginalia.nu/?id=123").param);
+        Assertions.assertEquals("https://search.marginalia.nu/?id=123", new EdgeUrl("https://search.marginalia.nu/?id=123").toString());
+
+        Assertions.assertEquals("t=123", new EdgeUrl("https://search.marginalia.nu/?t=123").param);
+        Assertions.assertEquals("https://search.marginalia.nu/?t=123", new EdgeUrl("https://search.marginalia.nu/?t=123").toString());
+
+        Assertions.assertEquals("v=123", new EdgeUrl("https://search.marginalia.nu/?v=123").param);
+        Assertions.assertEquals("https://search.marginalia.nu/?v=123", new EdgeUrl("https://search.marginalia.nu/?v=123").toString());
+
+        Assertions.assertEquals("id=1", new EdgeUrl("https://memex.marginalia.nu/showthread.php?id=1&count=5&tracking=123").param);
+        Assertions.assertEquals("https://memex.marginalia.nu/showthread.php?id=1",
+                new EdgeUrl("https://memex.marginalia.nu/showthread.php?id=1&count=5&tracking=123").toString());
+
+        Assertions.assertEquals("id=1&t=5", new EdgeUrl("https://memex.marginalia.nu/shöwthrëad.php?id=1&t=5&tracking=123").param);
+        Assertions.assertEquals("https://memex.marginalia.nu/sh%C3%B6wthr%C3%ABad.php?id=1&t=5", new EdgeUrl("https://memex.marginalia.nu/shöwthrëad.php?id=1&t=5&tracking=123").toString());
+
+        Assertions.assertEquals("id=1&t=5", new EdgeUrl("https://memex.marginalia.nu/shöwthrëad.php?trëaking=123&id=1&t=5&").param);
+        Assertions.assertEquals("https://memex.marginalia.nu/sh%C3%B6wthr%C3%ABad.php?id=1&t=5", new EdgeUrl("https://memex.marginalia.nu/shöwthrëad.php?trëaking=123&id=1&t=5&").toString());
+
+        Assertions.assertNull(new EdgeUrl("https://search.marginalia.nu/?m=123").param);
+        Assertions.assertNull(new EdgeUrl("https://search.marginalia.nu/?follow=123").param);
     }
 }
```
```diff
@@ -42,6 +42,12 @@ dependencies {
     implementation libs.bundles.curator
     implementation libs.bundles.flyway
 
+    libs.bundles.jooby.get().each {
+        implementation dependencies.create(it) {
+            exclude group: 'org.slf4j'
+        }
+    }
+
     testImplementation libs.bundles.slf4j.test
     implementation libs.bundles.mariadb
```
@@ -59,16 +59,13 @@ public class ProcessAdHocTaskHeartbeatImpl implements AutoCloseable, ProcessAdHo
      */
     @Override
    public void progress(String step, int stepProgress, int stepCount) {
+        int lastProgress = this.progress;
         this.step = step;
 
-        // off by one since we calculate the progress based on the number of steps,
-        // and Enum.ordinal() is zero-based (so the 5th step in a 5 step task is 4, not 5; resulting in the
-        // final progress being 80% and not 100%)
-
         this.progress = (int) Math.round(100. * stepProgress / (double) stepCount);
 
-        logger.info("ProcessTask {} progress: {}%", taskBase, progress);
+        if (this.progress / 10 != lastProgress / 10) {
+            logger.info("ProcessTask {} progress: {}%", taskBase, progress);
+        }
     }
 
     /** Wrap a collection to provide heartbeat progress updates as it's iterated through */
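The hunk above (and its twin in the service heartbeat further down) replaces a log line per step with a log line per crossed decile, so a ten-thousand-step task logs roughly ten progress lines instead of ten thousand. A standalone sketch of the arithmetic (class and field names hypothetical):

```java
// Hypothetical names; mirrors the arithmetic of the hunk above.
class ProgressLogger {
    private int progress = 0;

    void progress(int stepProgress, int stepCount) {
        int lastProgress = progress;
        progress = (int) Math.round(100. * stepProgress / (double) stepCount);

        // Log only when a 10% boundary is crossed: 0%, 10%, ..., 100%
        if (progress / 10 != lastProgress / 10) {
            System.out.println("progress: " + progress + "%");
        }
    }
}
```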
@@ -10,7 +10,9 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.time.LocalDateTime;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Optional;
+import java.util.Set;
 import java.util.function.Function;
 
 /** WorkLog is a journal of work done by a process,
@@ -61,6 +63,12 @@ public class WorkLog implements AutoCloseable, Closeable {
         return new WorkLoadIterable<>(logFile, mapper);
     }
 
+    public static int countEntries(Path crawlerLog) throws IOException {
+        try (var linesStream = Files.lines(crawlerLog)) {
+            return (int) linesStream.filter(WorkLogEntry::isJobId).count();
+        }
+    }
+
     // Use synchro over concurrent set to avoid competing writes
     // - correct is better than fast here, it's sketchy enough to use
     // a PrintWriter
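Files.lines holds the underlying file open until the stream is closed, so the try-with-resources in countEntries is load-bearing rather than stylistic. A hypothetical call site (the path is illustrative):

```java
import java.io.IOException;
import java.nio.file.Path;

class CountEntriesDemo {
    static void report() throws IOException {
        // Hypothetical path; countEntries only counts lines WorkLogEntry recognises as job ids
        Path crawlerLog = Path.of("/var/lib/wmsa/crawl-data/crawler.log");
        System.out.println("finished crawl jobs: " + WorkLog.countEntries(crawlerLog));
    }
}
```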
@@ -7,8 +7,6 @@ import nu.marginalia.service.discovery.property.PartitionTraits;
 import nu.marginalia.service.discovery.property.ServiceEndpoint;
 import nu.marginalia.service.discovery.property.ServiceKey;
 import nu.marginalia.service.discovery.property.ServicePartition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
@@ -24,7 +22,7 @@ import java.util.function.Function;
 public class GrpcMultiNodeChannelPool<STUB> {
     private final ConcurrentHashMap<Integer, GrpcSingleNodeChannelPool<STUB>> pools =
             new ConcurrentHashMap<>();
-    private static final Logger logger = LoggerFactory.getLogger(GrpcMultiNodeChannelPool.class);
     private final ServiceRegistryIf serviceRegistryIf;
     private final ServiceKey<? extends PartitionTraits.Multicast> serviceKey;
     private final Function<ServiceEndpoint.InstanceAddress, ManagedChannel> channelConstructor;
@@ -10,6 +10,8 @@ import nu.marginalia.service.discovery.property.ServiceKey;
 import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
 
 import java.time.Duration;
 import java.util.*;
@@ -26,13 +28,13 @@ import java.util.function.Function;
 public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
     private final Map<InstanceAddress, ConnectionHolder> channels = new ConcurrentHashMap<>();
 
+    private final Marker grpcMarker = MarkerFactory.getMarker("GRPC");
     private static final Logger logger = LoggerFactory.getLogger(GrpcSingleNodeChannelPool.class);
 
     private final ServiceRegistryIf serviceRegistryIf;
     private final Function<InstanceAddress, ManagedChannel> channelConstructor;
     private final Function<ManagedChannel, STUB> stubConstructor;
 
 
     public GrpcSingleNodeChannelPool(ServiceRegistryIf serviceRegistryIf,
                                      ServiceKey<? extends PartitionTraits.Unicast> serviceKey,
                                      Function<InstanceAddress, ManagedChannel> channelConstructor,
@@ -48,8 +50,6 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
         serviceRegistryIf.registerMonitor(this);
 
         onChange();
-
-        awaitChannel(Duration.ofSeconds(5));
     }
 
 
@@ -62,10 +62,10 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
         for (var route : Sets.symmetricDifference(oldRoutes, newRoutes)) {
             ConnectionHolder oldChannel;
             if (newRoutes.contains(route)) {
-                logger.info("Adding route {}", route);
+                logger.info(grpcMarker, "Adding route {} => {}", serviceKey, route);
                 oldChannel = channels.put(route, new ConnectionHolder(route));
             } else {
-                logger.info("Expelling route {}", route);
+                logger.info(grpcMarker, "Expelling route {} => {}", serviceKey, route);
                 oldChannel = channels.remove(route);
             }
             if (oldChannel != null) {
@@ -103,7 +103,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
         }
 
         try {
-            logger.info("Creating channel for {}:{}", serviceKey, address);
+            logger.info(grpcMarker, "Creating channel for {} => {}", serviceKey, address);
             value = channelConstructor.apply(address);
             if (channel.compareAndSet(null, value)) {
                 return value;
@@ -114,7 +114,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
             }
         }
         catch (Exception e) {
-            logger.error("Failed to get channel for " + address, e);
+            logger.error(grpcMarker, "Failed to get channel for " + address, e);
             return null;
         }
     }
@@ -206,7 +206,7 @@ public class GrpcSingleNodeChannelPool<STUB> extends ServiceChangeMonitor {
         }
 
         for (var e : exceptions) {
-            logger.error("Failed to call service {}", serviceKey, e);
+            logger.error(grpcMarker, "Failed to call service {}", serviceKey, e);
         }
 
         throw new ServiceNotAvailableException(serviceKey);
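The pattern across these four hunks is consistent: every log call gains the GRPC marker, which the log4j2 configurations later in this diff can match with a MarkerFilter independently of level or logger name. A self-contained sketch of the idiom (class name and values invented):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class MarkerDemo {
    private static final Marker GRPC = MarkerFactory.getMarker("GRPC");
    private static final Logger logger = LoggerFactory.getLogger(MarkerDemo.class);

    public static void main(String[] args) {
        // A log4j2 <MarkerFilter marker="GRPC" .../> can ALLOW or DENY this line
        // by marker alone, regardless of level or logger name.
        logger.info(GRPC, "Adding route {} => {}", "grpc:QueryApi[1]", "10.0.0.5:8080");
    }
}
```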
@@ -4,6 +4,11 @@ import nu.marginalia.service.discovery.property.ServiceKey;
 
 public class ServiceNotAvailableException extends RuntimeException {
     public ServiceNotAvailableException(ServiceKey<?> key) {
-        super("Service " + key + " not available");
+        super(key.toString());
+    }
+
+    @Override
+    public StackTraceElement[] getStackTrace() { // Suppress stack trace
+        return new StackTraceElement[0];
     }
 }
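Overriding getStackTrace() hides the trace from anything that reads it, but the trace is still captured when the exception is constructed. If construction cost ever mattered, the conventional alternative is the four-argument Throwable constructor, which skips fillInStackTrace() entirely; a hedged sketch, not the project's code:

```java
// Hedged sketch: suppressing stack-trace *capture* rather than just its readout.
public class QuietException extends RuntimeException {
    public QuietException(String message) {
        // writableStackTrace = false skips fillInStackTrace(), so no trace
        // is ever recorded; getStackTrace() then naturally returns an empty array.
        super(message, null, false, false);
    }
}
```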
@@ -57,16 +57,13 @@ public class ServiceAdHocTaskHeartbeatImpl implements AutoCloseable, ServiceAdHo
      */
     @Override
     public void progress(String step, int stepProgress, int stepCount) {
+        int lastProgress = this.progress;
         this.step = step;
 
-        // off by one since we calculate the progress based on the number of steps,
-        // and Enum.ordinal() is zero-based (so the 5th step in a 5 step task is 4, not 5; resulting in the
-        // final progress being 80% and not 100%)
-
         this.progress = (int) Math.round(100. * stepProgress / (double) stepCount);
 
-        logger.info("ServiceTask {} progress: {}%", taskBase, progress);
+        if (this.progress / 10 != lastProgress / 10) {
+            logger.info("ProcessTask {} progress: {}%", taskBase, progress);
+        }
     }
 
     public void shutDown() {
@@ -48,5 +48,10 @@ public record ServiceEndpoint(String host, int port) {
         public int port() {
             return endpoint.port();
         }
+
+        @Override
+        public String toString() {
+            return endpoint().host() + ":" + endpoint.port() + " [" + instance + "]";
+        }
     }
 }
@@ -48,6 +48,19 @@ public sealed interface ServiceKey<P extends ServicePartition> {
         {
             throw new UnsupportedOperationException();
         }
+
+        @Override
+        public String toString() {
+            final String shortName;
+
+            int periodIndex = name.lastIndexOf('.');
+
+            if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
+            else shortName = name;
+
+            return "rest:" + shortName;
+        }
     }
     record Grpc<P extends ServicePartition>(String name, P partition) implements ServiceKey<P> {
         public String baseName() {
@@ -64,6 +77,18 @@ public sealed interface ServiceKey<P extends ServicePartition> {
         {
             return new Grpc<>(name, partition);
         }
+
+        @Override
+        public String toString() {
+            final String shortName;
+
+            int periodIndex = name.lastIndexOf('.');
+
+            if (periodIndex >= 0) shortName = name.substring(periodIndex+1);
+            else shortName = name;
+
+            return "grpc:" + shortName + "[" + partition.identifier() + "]";
+        }
     }
 
 }
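Both overrides shorten the fully-qualified name to the segment after the last dot, so a hypothetical key for nu.marginalia.api.QueryApi on partition 1 would render as grpc:QueryApi[1] (or rest:QueryApi), which is what the route-change log lines earlier in this diff print. The trimming in isolation:

```java
public class ShortNameDemo {
    public static void main(String[] args) {
        String name = "nu.marginalia.api.QueryApi"; // hypothetical service name
        int periodIndex = name.lastIndexOf('.');
        String shortName = periodIndex >= 0 ? name.substring(periodIndex + 1) : name;
        System.out.println("grpc:" + shortName + "[1]"); // grpc:QueryApi[1]
    }
}
```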
@@ -89,7 +89,7 @@ public class DatabaseModule extends AbstractModule {
         config.addDataSourceProperty("prepStmtCacheSize", "250");
         config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
 
-        config.setMaximumPoolSize(5);
+        config.setMaximumPoolSize(Integer.getInteger("db.poolSize", 5));
         config.setMinimumIdle(2);
 
         config.setMaxLifetime(Duration.ofMinutes(9).toMillis());
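Integer.getInteger is easy to confuse with Integer.parseInt: it reads a JVM system property (not an environment variable) and falls back to the default when the property is absent or malformed, so the pool can now be resized per deployment with a -D flag and no rebuild:

```java
public class PoolSizeDemo {
    public static void main(String[] args) {
        // java -Ddb.poolSize=20 PoolSizeDemo  -> 20
        // java PoolSizeDemo                   -> 5
        int poolSize = Integer.getInteger("db.poolSize", 5);
        System.out.println("pool size: " + poolSize);
    }
}
```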
@@ -6,6 +6,7 @@ import nu.marginalia.service.ServiceId;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.net.InetAddress;
 import java.net.NetworkInterface;
 import java.util.Enumeration;
@@ -115,11 +116,12 @@ public class ServiceConfigurationModule extends AbstractModule {
         }
     }
 
-    public static String getLocalNetworkIP() throws Exception {
+    public static String getLocalNetworkIP() throws IOException {
         Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
 
         while (nets.hasMoreElements()) {
             NetworkInterface netif = nets.nextElement();
+            logger.info("Considering network interface {}: Up? {}, Loopback? {}", netif.getDisplayName(), netif.isUp(), netif.isLoopback());
             if (!netif.isUp() || netif.isLoopback()) {
                 continue;
             }
@@ -127,6 +129,7 @@ public class ServiceConfigurationModule extends AbstractModule {
             Enumeration<InetAddress> inetAddresses = netif.getInetAddresses();
             while (inetAddresses.hasMoreElements()) {
                 InetAddress addr = inetAddresses.nextElement();
+                logger.info("Considering address {}: SiteLocal? {}, Loopback? {}", addr.getHostAddress(), addr.isSiteLocalAddress(), addr.isLoopbackAddress());
                 if (addr.isSiteLocalAddress() && !addr.isLoopbackAddress()) {
                     return addr.getHostAddress();
                 }
@@ -0,0 +1,187 @@
+package nu.marginalia.service.server;
+
+import io.jooby.*;
+import io.prometheus.client.Counter;
+import nu.marginalia.mq.inbox.MqInboxIf;
+import nu.marginalia.service.client.ServiceNotAvailableException;
+import nu.marginalia.service.discovery.property.ServiceEndpoint;
+import nu.marginalia.service.discovery.property.ServiceKey;
+import nu.marginalia.service.discovery.property.ServicePartition;
+import nu.marginalia.service.module.ServiceConfiguration;
+import nu.marginalia.service.server.jte.JteModule;
+import nu.marginalia.service.server.mq.ServiceMqSubscription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.Marker;
+import org.slf4j.MarkerFactory;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+
+public class JoobyService {
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    // Marker for filtering out sensitive content from the persistent logs
+    private final Marker httpMarker = MarkerFactory.getMarker("HTTP");
+
+    private final Initialization initialization;
+
+    private final static Counter request_counter = Counter.build("wmsa_request_counter", "Request Counter")
+            .labelNames("service", "node")
+            .register();
+    private final static Counter request_counter_good = Counter.build("wmsa_request_counter_good", "Good Requests")
+            .labelNames("service", "node")
+            .register();
+    private final static Counter request_counter_bad = Counter.build("wmsa_request_counter_bad", "Bad Requests")
+            .labelNames("service", "node")
+            .register();
+    private final static Counter request_counter_err = Counter.build("wmsa_request_counter_err", "Error Requests")
+            .labelNames("service", "node")
+            .register();
+    private final String serviceName;
+    private static volatile boolean initialized = false;
+
+    protected final MqInboxIf messageQueueInbox;
+    private final int node;
+    private GrpcServer grpcServer;
+
+    private ServiceConfiguration config;
+    private final List<MvcExtension> joobyServices;
+    private final ServiceEndpoint restEndpoint;
+
+    public JoobyService(BaseServiceParams params,
+                        ServicePartition partition,
+                        List<DiscoverableService> grpcServices,
+                        List<MvcExtension> joobyServices
+                        ) throws Exception {
+
+        this.joobyServices = joobyServices;
+        this.initialization = params.initialization;
+        config = params.configuration;
+        node = config.node();
+
+        String inboxName = config.serviceName();
+        logger.info("Inbox name: {}", inboxName);
+
+        var serviceRegistry = params.serviceRegistry;
+
+        restEndpoint = serviceRegistry.registerService(ServiceKey.forRest(config.serviceId(), config.node()),
+                config.instanceUuid(), config.externalAddress());
+
+        var mqInboxFactory = params.messageQueueInboxFactory;
+        messageQueueInbox = mqInboxFactory.createSynchronousInbox(inboxName, config.node(), config.instanceUuid());
+        messageQueueInbox.subscribe(new ServiceMqSubscription(this));
+
+        serviceName = System.getProperty("service-name");
+
+        initialization.addCallback(params.heartbeat::start);
+        initialization.addCallback(messageQueueInbox::start);
+        initialization.addCallback(() -> params.eventLog.logEvent("SVC-INIT", serviceName + ":" + config.node()));
+        initialization.addCallback(() -> serviceRegistry.announceInstance(config.instanceUuid()));
+
+        Thread.setDefaultUncaughtExceptionHandler((t, e) -> {
+            if (e instanceof ServiceNotAvailableException) {
+                // reduce log spam for this common case
+                logger.error("Service not available: {}", e.getMessage());
+            }
+            else {
+                logger.error("Uncaught exception", e);
+            }
+            request_counter_err.labels(serviceName, Integer.toString(node)).inc();
+        });
+
+        if (!initialization.isReady() && ! initialized ) {
+            initialized = true;
+            grpcServer = new GrpcServer(config, serviceRegistry, partition, grpcServices);
+            grpcServer.start();
+        }
+    }
+
+    public void startJooby(Jooby jooby) {
+
+        logger.info("{} Listening to {}:{} ({})", getClass().getSimpleName(),
+                restEndpoint.host(),
+                restEndpoint.port(),
+                config.externalAddress());
+
+        // FIXME: This won't work outside of docker, may need to submit a PR to jooby to allow classpaths here
+        if (Files.exists(Path.of("/app/resources/jte")) || Files.exists(Path.of("/app/classes/jte-precompiled"))) {
+            jooby.install(new JteModule(Path.of("/app/resources/jte"), Path.of("/app/classes/jte-precompiled")));
+        }
+        if (Files.exists(Path.of("/app/resources/static"))) {
+            jooby.assets("/*", Paths.get("/app/resources/static"));
+        }
+        var options = new ServerOptions();
+        options.setHost(config.bindAddress());
+        options.setPort(restEndpoint.port());
+
+        // Enable gzip compression of response data, but set compression to the lowest level
+        // since it doesn't really save much more space to dial it up. It's typically a
+        // single digit percentage difference since HTML already compresses very well with level = 1.
+        options.setCompressionLevel(1);
+
+        // Set a cap on the number of worker threads, as Jooby's default value does not seem to consider
+        // multi-tenant servers with high thread counts, and spins up an exorbitant number of threads in that
+        // scenario
+        options.setWorkerThreads(Math.min(128, options.getWorkerThreads()));
+
+
+        jooby.setServerOptions(options);
+
+        jooby.get("/internal/ping", ctx -> "pong");
+        jooby.get("/internal/started", this::isInitialized);
+        jooby.get("/internal/ready", this::isReady);
+
+        for (var service : joobyServices) {
+            jooby.mvc(service);
+        }
+
+        jooby.before(this::auditRequestIn);
+        jooby.after(this::auditRequestOut);
+    }
+
+    private Object isInitialized(Context ctx) {
+        if (initialization.isReady()) {
+            return "ok";
+        }
+        else {
+            ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
+            return "bad";
+        }
+    }
+
+    public boolean isReady() {
+        return true;
+    }
+
+    private String isReady(Context ctx) {
+        if (isReady()) {
+            return "ok";
+        }
+        else {
+            ctx.setResponseCode(StatusCode.FAILED_DEPENDENCY_CODE);
+            return "bad";
+        }
+    }
+
+    private void auditRequestIn(Context ctx) {
+        request_counter.labels(serviceName, Integer.toString(node)).inc();
+    }
+
+    private void auditRequestOut(Context ctx, Object result, Throwable failure) {
+        if (ctx.getResponseCode().value() < 400) {
+            request_counter_good.labels(serviceName, Integer.toString(node)).inc();
+        }
+        else {
+            request_counter_bad.labels(serviceName, Integer.toString(node)).inc();
+        }
+
+        if (failure != null) {
+            logger.error("Request failed " + ctx.getMethod() + " " + ctx.getRequestURL(), failure);
+            request_counter_err.labels(serviceName, Integer.toString(node)).inc();
+        }
+    }
+
+}
@@ -6,25 +6,36 @@ import nu.marginalia.service.module.ServiceConfiguration;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.net.InetSocketAddress;
 
 public class MetricsServer {
+
+    private static final Logger logger = LoggerFactory.getLogger(MetricsServer.class);
+
     @Inject
-    public MetricsServer(ServiceConfiguration configuration) throws Exception {
+    public MetricsServer(ServiceConfiguration configuration) {
         // If less than zero, we forego setting up a metrics server
         if (configuration.metricsPort() < 0)
             return;
 
-        Server server = new Server(new InetSocketAddress(configuration.bindAddress(), configuration.metricsPort()));
+        try {
+            Server server = new Server(new InetSocketAddress(configuration.bindAddress(), configuration.metricsPort()));
 
             ServletContextHandler context = new ServletContextHandler();
             context.setContextPath("/");
             server.setHandler(context);
 
             context.addServlet(new ServletHolder(new MetricsServlet()), "/metrics");
 
-        server.start();
+            logger.info("MetricsServer listening on {}:{}", configuration.bindAddress(), configuration.metricsPort());
+
+            server.start();
+        }
+        catch (Exception|NoSuchMethodError ex) {
+            logger.error("Failed to set up metrics server", ex);
+        }
     }
 }
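Catching NoSuchMethodError alongside Exception is deliberate: linkage errors extend Error, not Exception, so a plain catch (Exception ex) would still let an incompatible Jetty on the classpath abort service startup. A compact demonstration of the distinction:

```java
public class ErrorCatchDemo {
    public static void main(String[] args) {
        try {
            throw new NoSuchMethodError("simulated linkage failure");
        } catch (Exception ex) {
            System.out.println("never reached: Error is not an Exception");
        } catch (NoSuchMethodError err) {
            System.out.println("caught: " + err.getMessage());
        }
    }
}
```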
@@ -35,21 +35,8 @@ public class RateLimiter {
     }
 
 
-    public static RateLimiter forExpensiveRequest() {
-        return new RateLimiter(5, 10);
-    }
-
     public static RateLimiter custom(int perMinute) {
-        return new RateLimiter(perMinute, 60);
+        return new RateLimiter(4 * perMinute, perMinute);
     }
-
-    public static RateLimiter forSpamBots() {
-        return new RateLimiter(120, 3600);
-    }
-
-
-    public static RateLimiter forLogin() {
-        return new RateLimiter(3, 15);
-    }
 
     private void cleanIdleBuckets() {
@@ -62,7 +49,7 @@ public class RateLimiter {
     }
 
     private Bucket createBucket() {
-        var refill = Refill.greedy(1, Duration.ofSeconds(refillRate));
+        var refill = Refill.greedy(refillRate, Duration.ofSeconds(60));
         var bw = Bandwidth.classic(capacity, refill);
         return Bucket.builder().addLimit(bw).build();
    }
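The two RateLimiter hunks change the refill model: previously one token every refillRate seconds, now refillRate tokens per 60 seconds, paired with a burst capacity of four times the per-minute rate from custom(). A runnable sketch using the same Bucket4j calls the class already relies on (package names per Bucket4j 7+, values invented):

```java
import io.github.bucket4j.Bandwidth;
import io.github.bucket4j.Bucket;
import io.github.bucket4j.Refill;

import java.time.Duration;

public class RateLimiterSketch {
    public static void main(String[] args) {
        int perMinute = 60;

        // capacity = 4 * perMinute allows short bursts; the refill restores
        // perMinute tokens per minute, matching the steady-state rate.
        Refill refill = Refill.greedy(perMinute, Duration.ofMinutes(1));
        Bandwidth limit = Bandwidth.classic(4 * perMinute, refill);
        Bucket bucket = Bucket.builder().addLimit(limit).build();

        System.out.println(bucket.tryConsume(1)); // true until the burst capacity is spent
    }
}
```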
@@ -16,7 +16,7 @@ import spark.Spark;
 
 import java.util.List;
 
-public class Service {
+public class SparkService {
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     // Marker for filtering out sensitive content from the persistent logs
@@ -43,10 +43,10 @@ public class Service {
     private final int node;
     private GrpcServer grpcServer;
 
-    public Service(BaseServiceParams params,
+    public SparkService(BaseServiceParams params,
                    Runnable configureStaticFiles,
                    ServicePartition partition,
                    List<DiscoverableService> grpcServices) throws Exception {
 
         this.initialization = params.initialization;
         var config = params.configuration;
@@ -126,18 +126,18 @@ public class Service {
         }
     }
 
-    public Service(BaseServiceParams params,
+    public SparkService(BaseServiceParams params,
                    ServicePartition partition,
                    List<DiscoverableService> grpcServices) throws Exception {
         this(params,
-                Service::defaultSparkConfig,
+                SparkService::defaultSparkConfig,
                 partition,
                 grpcServices);
     }
 
-    public Service(BaseServiceParams params) throws Exception {
+    public SparkService(BaseServiceParams params) throws Exception {
         this(params,
-                Service::defaultSparkConfig,
+                SparkService::defaultSparkConfig,
                 ServicePartition.any(),
                 List.of());
     }
@@ -0,0 +1,61 @@
+package nu.marginalia.service.server.jte;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+import edu.umd.cs.findbugs.annotations.Nullable;
+import gg.jte.ContentType;
+import gg.jte.TemplateEngine;
+import gg.jte.resolve.DirectoryCodeResolver;
+import io.jooby.*;
+
+import java.io.File;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Stream;
+
+// Temporary workaround for a bug
+// APL-2.0 https://github.com/jooby-project/jooby
+public class JteModule implements Extension {
+    private Path sourceDirectory;
+    private Path classDirectory;
+    private TemplateEngine templateEngine;
+
+    public JteModule(@NonNull Path sourceDirectory, @NonNull Path classDirectory) {
+        this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
+        this.classDirectory = (Path)Objects.requireNonNull(classDirectory, "Class directory is required.");
+    }
+
+    public JteModule(@NonNull Path sourceDirectory) {
+        this.sourceDirectory = (Path)Objects.requireNonNull(sourceDirectory, "Source directory is required.");
+    }
+
+    public JteModule(@NonNull TemplateEngine templateEngine) {
+        this.templateEngine = (TemplateEngine)Objects.requireNonNull(templateEngine, "Template engine is required.");
+    }
+
+    public void install(@NonNull Jooby application) {
+        if (this.templateEngine == null) {
+            this.templateEngine = create(application.getEnvironment(), this.sourceDirectory, this.classDirectory);
+        }
+
+        ServiceRegistry services = application.getServices();
+        services.put(TemplateEngine.class, this.templateEngine);
+        application.encoder(MediaType.html, new JteTemplateEngine(this.templateEngine));
+    }
+
+    public static TemplateEngine create(@NonNull Environment environment, @NonNull Path sourceDirectory, @Nullable Path classDirectory) {
+        boolean dev = environment.isActive("dev", new String[]{"test"});
+        if (dev) {
+            Objects.requireNonNull(sourceDirectory, "Source directory is required.");
+            Path requiredClassDirectory = (Path)Optional.ofNullable(classDirectory).orElseGet(() -> sourceDirectory.resolve("jte-classes"));
+            TemplateEngine engine = TemplateEngine.create(new DirectoryCodeResolver(sourceDirectory), requiredClassDirectory, ContentType.Html, environment.getClassLoader());
+            Optional<List<String>> var10000 = Optional.ofNullable(System.getProperty("jooby.run.classpath")).map((it) -> it.split(File.pathSeparator)).map(Stream::of).map(Stream::toList);
+            Objects.requireNonNull(engine);
+            var10000.ifPresent(engine::setClassPath);
+            return engine;
+        } else {
+            return classDirectory == null ? TemplateEngine.createPrecompiled(ContentType.Html) : TemplateEngine.createPrecompiled(classDirectory, ContentType.Html);
+        }
+    }
+}
@@ -0,0 +1,48 @@
+package nu.marginalia.service.server.jte;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
+import gg.jte.TemplateEngine;
+import io.jooby.Context;
+import io.jooby.MapModelAndView;
+import io.jooby.ModelAndView;
+import io.jooby.buffer.DataBuffer;
+import io.jooby.internal.jte.DataBufferOutput;
+
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.List;
+
+// Temporary workaround for a bug
+// APL-2.0 https://github.com/jooby-project/jooby
+class JteTemplateEngine implements io.jooby.TemplateEngine {
+    private final TemplateEngine jte;
+    private final List<String> extensions;
+
+    public JteTemplateEngine(TemplateEngine jte) {
+        this.jte = jte;
+        this.extensions = List.of(".jte", ".kte");
+    }
+
+
+    @NonNull @Override
+    public List<String> extensions() {
+        return extensions;
+    }
+
+    @Override
+    public DataBuffer render(Context ctx, ModelAndView modelAndView) {
+        var buffer = ctx.getBufferFactory().allocateBuffer();
+        var output = new DataBufferOutput(buffer, StandardCharsets.UTF_8);
+        var attributes = ctx.getAttributes();
+        if (modelAndView instanceof MapModelAndView mapModelAndView) {
+            var mapModel = new HashMap<String, Object>();
+            mapModel.putAll(attributes);
+            mapModel.putAll(mapModelAndView.getModel());
+            jte.render(modelAndView.getView(), mapModel, output);
+        } else {
+            jte.render(modelAndView.getView(), modelAndView.getModel(), output);
+        }
+
+        return buffer;
+    }
+}
@@ -3,7 +3,6 @@ package nu.marginalia.service.server.mq;
 import nu.marginalia.mq.MqMessage;
 import nu.marginalia.mq.inbox.MqInboxResponse;
 import nu.marginalia.mq.inbox.MqSubscription;
-import nu.marginalia.service.server.Service;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -15,10 +14,10 @@ import java.util.Map;
 public class ServiceMqSubscription implements MqSubscription {
     private static final Logger logger = LoggerFactory.getLogger(ServiceMqSubscription.class);
     private final Map<String, Method> requests = new HashMap<>();
-    private final Service service;
+    private final Object service;
 
 
-    public ServiceMqSubscription(Service service) {
+    public ServiceMqSubscription(Object service) {
         this.service = service;
 
         /* Wire up all methods annotated with @MqRequest and @MqNotification
@@ -3,8 +3,16 @@
         <Console name="Console" target="SYSTEM_OUT">
             <PatternLayout pattern="%d{HH:mm:ss,SSS} %style{%-8markerSimpleName}{FG_Cyan} %highlight{%-5level}{FATAL=red, ERROR=red, WARN=yellow} %-24t %-20c{1} -- %msg%n"/>
             <Filters>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ProcessConsole" target="SYSTEM_OUT">
+            <PatternLayout pattern="%style{P}{FG_Cyan} %msg%n"/>
+            <Filters>
+                <MarkerFilter marker="PROCESS" onMatch="ALLOW" onMismatch="DENY" />
             </Filters>
         </Console>
         <RollingFile name="LogToFile" fileName="${env:WMSA_LOG_DIR:-/var/log/wmsa}/wmsa-${sys:service-name}-${env:WMSA_SERVICE_NODE:-0}.log" filePattern="/var/log/wmsa/wmsa-${sys:service-name}-${env:WMSA_SERVICE_NODE:-0}-log-%d{MM-dd-yy-HH-mm-ss}-%i.log.gz"
@@ -13,15 +21,29 @@
             <Filters>
                 <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
             </Filters>
             <SizeBasedTriggeringPolicy size="10MB" />
         </RollingFile>
+        <RollingFile name="LogToFile" fileName="${env:WMSA_LOG_DIR:-/var/log/wmsa}/crawler-audit-${env:WMSA_SERVICE_NODE:-0}.log" filePattern="/var/log/wmsa/crawler-audit-${env:WMSA_SERVICE_NODE:-0}-log-%d{MM-dd-yy-HH-mm-ss}-%i.log.gz"
+                     ignoreExceptions="false">
+            <PatternLayout>
+                <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS}: %msg{nolookups}%n</Pattern>
+            </PatternLayout>
+            <SizeBasedTriggeringPolicy size="100MB" />
+            <Filters>
+                <MarkerFilter marker="CRAWLER" onMatch="ALLOW" onMismatch="DENY" />
+            </Filters>
+        </RollingFile>
     </Appenders>
     <Loggers>
         <Logger name="org.apache.zookeeper" level="WARN" />
+        <Logger name="org.apache.pdfbox" level="ERROR" />
+        <Logger name="org.apache.fontbox.ttf" level="ERROR" />
         <Root level="info">
             <AppenderRef ref="Console"/>
+            <AppenderRef ref="ProcessConsole"/>
             <AppenderRef ref="LogToFile"/>
         </Root>
     </Loggers>
@@ -1,10 +1,49 @@
 <Configuration xmlns="http://logging.apache.org/log4j/2.0/config" >
     <Appenders>
-        <Console name="Console" target="SYSTEM_OUT">
-            <PatternLayout pattern="%d{HH:mm:ss,SSS} %style{%-8markerSimpleName}{FG_Cyan} %highlight{%-5level}{FATAL=red, ERROR=red, WARN=yellow} %-24t %-20c{1} -- %msg%n"/>
+        <Console name="ConsoleInfo" target="SYSTEM_OUT">
+            <PatternLayout pattern="- %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
             <Filters>
+                <LevelMatchFilter level="INFO" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleWarn" target="SYSTEM_OUT">
+            <PatternLayout pattern="⚠ %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="WARN" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleError" target="SYSTEM_OUT">
+            <PatternLayout pattern="🔥 %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="ERROR" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleFatal" target="SYSTEM_OUT">
+            <PatternLayout pattern="💀 %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="FATAL" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ProcessConsole" target="SYSTEM_OUT">
+            <PatternLayout pattern="%style{%msg%n}{FG_Cyan}"/>
+            <Filters>
+                <MarkerFilter marker="PROCESS" onMatch="ALLOW" onMismatch="DENY" />
             </Filters>
         </Console>
         <RollingFile name="LogToFile" fileName="${env:WMSA_LOG_DIR:-/var/log/wmsa}/wmsa-${sys:service-name}-${env:WMSA_SERVICE_NODE:-0}.log" filePattern="/var/log/wmsa/wmsa-${sys:service-name}-${env:WMSA_SERVICE_NODE:-0}-log-%d{MM-dd-yy-HH-mm-ss}-%i.log.gz"
@@ -17,14 +56,30 @@
                 <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="QUERY" onMatch="DENY" onMismatch="NEUTRAL" />
                 <MarkerFilter marker="HTTP" onMatch="DENY" onMismatch="NEUTRAL" />
+                <MarkerFilter marker="CRAWLER" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </RollingFile>
+        <RollingFile name="LogToFile" fileName="${env:WMSA_LOG_DIR:-/var/log/wmsa}/crawler-audit-${env:WMSA_SERVICE_NODE:-0}.log" filePattern="/var/log/wmsa/crawler-audit-${env:WMSA_SERVICE_NODE:-0}-log-%d{MM-dd-yy-HH-mm-ss}-%i.log.gz"
+                     ignoreExceptions="false">
+            <PatternLayout>
+                <Pattern>%d{yyyy-MM-dd HH:mm:ss,SSS}: %msg{nolookups}%n</Pattern>
+            </PatternLayout>
+            <SizeBasedTriggeringPolicy size="100MB" />
+            <Filters>
+                <MarkerFilter marker="CRAWLER" onMatch="ALLOW" onMismatch="DENY" />
             </Filters>
         </RollingFile>
     </Appenders>
     <Loggers>
         <Logger name="org.apache.zookeeper" level="WARN" />
+        <Logger name="org.apache.pdfbox" level="ERROR" />
+        <Logger name="org.apache.fontbox.ttf" level="ERROR" />
         <Root level="info">
-            <AppenderRef ref="Console"/>
+            <AppenderRef ref="ConsoleInfo"/>
+            <AppenderRef ref="ConsoleWarn"/>
+            <AppenderRef ref="ConsoleError"/>
+            <AppenderRef ref="ConsoleFatal"/>
+            <AppenderRef ref="ProcessConsole"/>
             <AppenderRef ref="LogToFile"/>
         </Root>
     </Loggers>
@@ -1,15 +1,50 @@
 <Configuration xmlns="http://logging.apache.org/log4j/2.0/config" >
     <Appenders>
-        <Console name="Console" target="SYSTEM_OUT">
-            <PatternLayout pattern="%d{HH:mm:ss,SSS} %style{%-8markerSimpleName}{FG_Cyan} %highlight{%-5level}{FATAL=red, ERROR=red, WARN=yellow} %-24t %-20c{1} -- %msg%n"/>
+        <Console name="ConsoleInfo" target="SYSTEM_OUT">
+            <PatternLayout pattern="- %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="INFO" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleWarn" target="SYSTEM_OUT">
+            <PatternLayout pattern="⚠ %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="WARN" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleError" target="SYSTEM_OUT">
+            <PatternLayout pattern="🔥 %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="ERROR" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ConsoleFatal" target="SYSTEM_OUT">
+            <PatternLayout pattern="💀 %d{HH:mm:ss,SSS} %-20c{1} -- %msg%n"/>
+            <Filters>
+                <LevelMatchFilter level="FATAL" onMatch="ALLOW" onMismatch="DENY"/>
+                <MarkerFilter marker="PROCESS" onMatch="DENY" onMismatch="NEUTRAL" />
+            </Filters>
+        </Console>
+        <Console name="ProcessConsole" target="SYSTEM_OUT">
+            <PatternLayout pattern="%style{%msg%n}{FG_Cyan}"/>
+            <Filters>
+                <MarkerFilter marker="PROCESS" onMatch="ALLOW" onMismatch="DENY" />
+            </Filters>
         </Console>
     </Appenders>
     <Loggers>
         <Logger name="org.apache.zookeeper" level="WARN" />
+        <Logger name="org.apache.pdfbox" level="ERROR" />
+        <Logger name="org.apache.fontbox.ttf" level="ERROR" />
         <Root level="info">
-            <AppenderRef ref="Console"/>
-            <AppenderRef ref="LogToFile"/>
+            <AppenderRef ref="ConsoleInfo"/>
+            <AppenderRef ref="ConsoleWarn"/>
+            <AppenderRef ref="ConsoleError"/>
+            <AppenderRef ref="ConsoleFatal"/>
+            <AppenderRef ref="ProcessConsole"/>
         </Root>
     </Loggers>
 </Configuration>
@@ -25,7 +25,7 @@ import static org.mockito.Mockito.when;
 class ZkServiceRegistryTest {
     private static final int ZOOKEEPER_PORT = 2181;
     private static final GenericContainer<?> zookeeper =
-            new GenericContainer<>("zookeeper:3.8.0")
+            new GenericContainer<>("zookeeper:3.8")
                     .withExposedPorts(ZOOKEEPER_PORT);
 
     List<ZkServiceRegistry> registries = new ArrayList<>();
@@ -48,12 +48,13 @@ public class ExecutorExportClient {
         return msgId;
     }
 
-    public void exportSampleData(int node, FileStorageId fid, int size, String name) {
+    public void exportSampleData(int node, FileStorageId fid, int size, String ctFilter, String name) {
         channelPool.call(ExecutorExportApiBlockingStub::exportSampleData)
                 .forNode(node)
                 .run(RpcExportSampleData.newBuilder()
                         .setFileStorageId(fid.id())
                         .setSize(size)
+                        .setCtFilter(ctFilter)
                         .setName(name)
                         .build());
     }
@@ -100,6 +100,7 @@ message RpcExportSampleData {
     int64 fileStorageId = 1;
     int32 size = 2;
     string name = 3;
+    string ctFilter = 4;
 }
 message RpcDownloadSampleData {
     string sampleSet = 1;
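Appending ctFilter = 4 is wire-compatible: protobuf identifies fields by number, so old clients simply leave it unset and old servers ignore it. In proto3 an unset string reads back as empty rather than null; a hypothetical check against the regenerated Java class:

```java
class CtFilterDefaultDemo {
    static void demo() {
        // Hypothetical check; RpcExportSampleData is the regenerated message class.
        RpcExportSampleData msg = RpcExportSampleData.newBuilder()
                .setFileStorageId(1L)
                .setSize(100)
                .setName("sample")
                .build();                                // ctFilter deliberately left unset
        System.out.println(msg.getCtFilter().isEmpty()); // true: proto3 strings default to ""
    }
}
```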
@@ -20,6 +20,7 @@ public enum ExecutorActor {
     EXPORT_FEEDS(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED),
     EXPORT_SAMPLE_DATA(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED),
     DOWNLOAD_SAMPLE(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED),
+    MIGRATE_CRAWL_DATA(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED),
 
     PROC_CONVERTER_SPAWNER(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED, NodeProfile.SIDELOAD),
     PROC_LOADER_SPAWNER(NodeProfile.BATCH_CRAWL, NodeProfile.MIXED, NodeProfile.SIDELOAD),
@@ -66,6 +66,7 @@ public class ExecutorActorControlService {
                                      DownloadSampleActor downloadSampleActor,
                                      ScrapeFeedsActor scrapeFeedsActor,
                                      ExecutorActorStateMachines stateMachines,
+                                     MigrateCrawlDataActor migrateCrawlDataActor,
                                      ExportAllPrecessionActor exportAllPrecessionActor,
                                      UpdateRssActor updateRssActor) throws SQLException {
         this.messageQueueFactory = messageQueueFactory;
@@ -107,6 +108,8 @@ public class ExecutorActorControlService {
         register(ExecutorActor.SCRAPE_FEEDS, scrapeFeedsActor);
         register(ExecutorActor.UPDATE_RSS, updateRssActor);
 
+        register(ExecutorActor.MIGRATE_CRAWL_DATA, migrateCrawlDataActor);
+
         if (serviceConfiguration.node() == 1) {
             register(ExecutorActor.PREC_EXPORT_ALL, exportAllPrecessionActor);
         }
@@ -14,6 +14,8 @@ import nu.marginalia.mq.persistence.MqPersistence;
 import nu.marginalia.nodecfg.NodeConfigurationService;
 import nu.marginalia.nodecfg.model.NodeProfile;
 import nu.marginalia.service.module.ServiceConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.time.Duration;
 import java.time.LocalDateTime;
@@ -29,6 +31,7 @@ public class UpdateRssActor extends RecordActorPrototype {
 
     private final NodeConfigurationService nodeConfigurationService;
     private final MqPersistence persistence;
+    private static final Logger logger = LoggerFactory.getLogger(UpdateRssActor.class);
 
     @Inject
     public UpdateRssActor(Gson gson,
@@ -101,8 +104,8 @@ public class UpdateRssActor extends RecordActorPrototype {
             case UpdateRefresh(int count, long msgId) -> {
                 MqMessage msg = persistence.waitForMessageTerminalState(msgId, Duration.ofSeconds(10), Duration.ofHours(12));
                 if (msg == null) {
-                    // Retry the update
-                    yield new Error("Failed to update feeds: message not found");
+                    logger.warn("UpdateRefresh is taking a very long time");
+                    yield new UpdateRefresh(count, msgId);
                 } else if (msg.state() != MqMessageState.OK) {
                     // Retry the update
                     yield new Error("Failed to update feeds: " + msg.state());
@@ -119,8 +122,8 @@ public class UpdateRssActor extends RecordActorPrototype {
             case UpdateClean(long msgId) -> {
                 MqMessage msg = persistence.waitForMessageTerminalState(msgId, Duration.ofSeconds(10), Duration.ofHours(12));
                 if (msg == null) {
-                    // Retry the update
-                    yield new Error("Failed to update feeds: message not found");
+                    logger.warn("UpdateClean is taking a very long time");
+                    yield new UpdateClean(msgId);
                 } else if (msg.state() != MqMessageState.OK) {
                     // Retry the update
                     yield new Error("Failed to update feeds: " + msg.state());
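The fix in both hunks is the same: a poll timeout (msg == null) now yields the current state again, re-entering the wait, while a genuinely failed terminal state still becomes an Error. A hedged, framework-free sketch of that retry-by-self-transition shape (all types invented):

```java
class RetrySketch {
    record UpdateRefresh(int count, long msgId) {}
    record Error(String why) {}
    record Updated() {}

    // msgState == null models waitForMessageTerminalState timing out
    static Object next(UpdateRefresh state, String msgState) {
        if (msgState == null)
            return new UpdateRefresh(state.count(), state.msgId()); // retry: same state again
        if (!"OK".equals(msgState))
            return new Error("Failed to update feeds: " + msgState); // real failure
        return new Updated();
    }
}
```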
@@ -8,6 +8,7 @@ import nu.marginalia.actor.state.ActorResumeBehavior;
 import nu.marginalia.actor.state.ActorStep;
 import nu.marginalia.actor.state.Resume;
 import nu.marginalia.service.control.ServiceEventLog;
+import nu.marginalia.service.control.ServiceHeartbeat;
 import nu.marginalia.storage.FileStorageService;
 import nu.marginalia.storage.model.FileStorage;
 import nu.marginalia.storage.model.FileStorageId;
@@ -19,6 +20,7 @@ import org.slf4j.Logger;
|
|||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
|
import java.net.HttpURLConnection;
|
||||||
import java.net.MalformedURLException;
|
import java.net.MalformedURLException;
|
||||||
import java.net.URI;
|
import java.net.URI;
|
||||||
import java.net.URL;
|
import java.net.URL;
|
||||||
@@ -32,6 +34,7 @@ public class DownloadSampleActor extends RecordActorPrototype {
|
|||||||
|
|
||||||
private final FileStorageService storageService;
|
private final FileStorageService storageService;
|
||||||
private final ServiceEventLog eventLog;
|
private final ServiceEventLog eventLog;
|
||||||
|
private final ServiceHeartbeat heartbeat;
|
||||||
private final Logger logger = LoggerFactory.getLogger(getClass());
|
private final Logger logger = LoggerFactory.getLogger(getClass());
|
||||||
|
|
||||||
@Resume(behavior = ActorResumeBehavior.ERROR)
|
@Resume(behavior = ActorResumeBehavior.ERROR)
|
||||||
@@ -66,15 +69,39 @@ public class DownloadSampleActor extends RecordActorPrototype {
|
|||||||
|
|
||||||
Files.deleteIfExists(Path.of(tarFileName));
|
Files.deleteIfExists(Path.of(tarFileName));
|
||||||
|
|
||||||
try (var is = new BufferedInputStream(new URI(downloadURI).toURL().openStream());
|
HttpURLConnection urlConnection = (HttpURLConnection) new URI(downloadURI).toURL().openConnection();
|
||||||
var os = new BufferedOutputStream(Files.newOutputStream(Path.of(tarFileName), StandardOpenOption.CREATE))) {
|
|
||||||
is.transferTo(os);
|
try (var hb = heartbeat.createServiceAdHocTaskHeartbeat("Downloading sample")) {
|
||||||
|
long size = urlConnection.getContentLengthLong();
|
||||||
|
byte[] buffer = new byte[8192];
|
||||||
|
|
||||||
|
try (var is = new BufferedInputStream(urlConnection.getInputStream());
|
||||||
|
var os = new BufferedOutputStream(Files.newOutputStream(Path.of(tarFileName), StandardOpenOption.CREATE))) {
|
||||||
|
long copiedSize = 0;
|
||||||
|
|
||||||
|
while (copiedSize < size) {
|
||||||
|
int read = is.read(buffer);
|
||||||
|
|
||||||
|
if (read < 0) // We've been promised a file of length 'size'
|
||||||
|
throw new IOException("Unexpected end of stream");
|
||||||
|
|
||||||
|
os.write(buffer, 0, read);
|
||||||
|
copiedSize += read;
|
||||||
|
|
||||||
|
// Update progress bar
|
||||||
|
hb.progress(String.format("%d MB", copiedSize / 1024 / 1024), (int) (copiedSize / 1024), (int) (size / 1024));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
catch (Exception ex) {
|
catch (Exception ex) {
|
||||||
eventLog.logEvent(DownloadSampleActor.class, "Error downloading sample");
|
eventLog.logEvent(DownloadSampleActor.class, "Error downloading sample");
|
||||||
logger.error("Error downloading sample", ex);
|
logger.error("Error downloading sample", ex);
|
||||||
yield new Error();
|
yield new Error();
|
||||||
}
|
}
|
||||||
|
finally {
|
||||||
|
urlConnection.disconnect();
|
||||||
|
}
|
||||||
|
|
||||||
eventLog.logEvent(DownloadSampleActor.class, "Download complete");
|
eventLog.logEvent(DownloadSampleActor.class, "Download complete");
|
||||||
yield new Extract(fileStorageId, tarFileName);
|
yield new Extract(fileStorageId, tarFileName);
|
||||||
@@ -170,11 +197,12 @@ public class DownloadSampleActor extends RecordActorPrototype {
|
|||||||
@Inject
|
@Inject
|
||||||
public DownloadSampleActor(Gson gson,
|
public DownloadSampleActor(Gson gson,
|
||||||
FileStorageService storageService,
|
FileStorageService storageService,
|
||||||
ServiceEventLog eventLog)
|
ServiceEventLog eventLog, ServiceHeartbeat heartbeat)
|
||||||
{
|
{
|
||||||
super(gson);
|
super(gson);
|
||||||
this.storageService = storageService;
|
this.storageService = storageService;
|
||||||
this.eventLog = eventLog;
|
this.eventLog = eventLog;
|
||||||
|
this.heartbeat = heartbeat;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
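The chunked loop above replaces the one-shot is.transferTo(os) so the ad-hoc task heartbeat can report progress. Note it relies on the server sending a Content-Length header: getContentLengthLong() returns -1 when the length is unknown, in which case the while-condition never admits a read. A generic sketch of the same pattern, under illustrative names (ProgressFn and copyWithProgress are not codebase API):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    // Sketch: copy a stream of known length in chunks, reporting progress.
    interface ProgressFn { void report(long copied, long total); }

    static long copyWithProgress(InputStream is, OutputStream os,
                                 long size, ProgressFn progress) throws IOException {
        byte[] buffer = new byte[8192];
        long copied = 0;
        while (copied < size) {
            int read = is.read(buffer);
            if (read < 0)
                throw new IOException("Unexpected end of stream"); // promised 'size' bytes
            os.write(buffer, 0, read);
            copied += read;
            progress.report(copied, size);
        }
        return copied;
    }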
@@ -26,32 +26,32 @@ public class ExportSampleDataActor extends RecordActorPrototype {
     private final MqOutbox exportTasksOutbox;
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
-    public record Export(FileStorageId crawlId, int size, String name) implements ActorStep {}
-    public record Run(FileStorageId crawlId, FileStorageId destId, int size, String name, long msgId) implements ActorStep {
-        public Run(FileStorageId crawlId, FileStorageId destId, int size, String name) {
-            this(crawlId, destId, size, name, -1);
+    public record Export(FileStorageId crawlId, int size, String ctFilter, String name) implements ActorStep {}
+    public record Run(FileStorageId crawlId, FileStorageId destId, int size, String ctFilter, String name, long msgId) implements ActorStep {
+        public Run(FileStorageId crawlId, FileStorageId destId, int size, String name, String ctFilter) {
+            this(crawlId, destId, size, name, ctFilter, -1);
         }
     }
 
     @Override
     public ActorStep transition(ActorStep self) throws Exception {
         return switch(self) {
-            case Export(FileStorageId crawlId, int size, String name) -> {
+            case Export(FileStorageId crawlId, int size, String ctFilter, String name) -> {
                 var storage = storageService.allocateStorage(FileStorageType.EXPORT,
                         "crawl-sample-export",
                         "Crawl Data Sample " + name + "/" + size + " " + LocalDateTime.now()
                 );
 
                 if (storage == null) yield new Error("Bad storage id");
-                yield new Run(crawlId, storage.id(), size, name);
+                yield new Run(crawlId, storage.id(), size, ctFilter, name);
             }
-            case Run(FileStorageId crawlId, FileStorageId destId, int size, String name, long msgId) when msgId < 0 -> {
+            case Run(FileStorageId crawlId, FileStorageId destId, int size, String ctFilter, String name, long msgId) when msgId < 0 -> {
                 storageService.setFileStorageState(destId, FileStorageState.NEW);
 
-                long newMsgId = exportTasksOutbox.sendAsync(ExportTaskRequest.sampleData(crawlId, destId, size, name));
-                yield new Run(crawlId, destId, size, name, newMsgId);
+                long newMsgId = exportTasksOutbox.sendAsync(ExportTaskRequest.sampleData(crawlId, destId, ctFilter, size, name));
+                yield new Run(crawlId, destId, size, ctFilter, name, newMsgId);
             }
-            case Run(_, FileStorageId destId, _, _, long msgId) -> {
+            case Run(_, FileStorageId destId, _, _, _, long msgId) -> {
                 var rsp = processWatcher.waitResponse(exportTasksOutbox, ProcessService.ProcessId.EXPORT_TASKS, msgId);
 
                 if (rsp.state() != MqMessageState.OK) {
@@ -70,7 +70,7 @@ public class ExportSampleDataActor extends RecordActorPrototype {
 
     @Override
     public String describe() {
-        return "Export RSS/Atom feeds from crawl data";
+        return "Export sample crawl data";
     }
 
     @Inject
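These hunks thread a new content-type filter (ctFilter) from the Export step through Run and into ExportTaskRequest.sampleData. One caveat worth noting: the convenience constructor declares (…, name, ctFilter) but forwards its arguments positionally into the canonical (…, ctFilter, name, msgId) component order, so the two string components end up swapped relative to the parameter names; positional callers should double-check the ordering. An illustrative invocation of the new step (the surrounding actor-control call and all values are hypothetical):

    // Kick off a sample export of up to 1000 documents, restricted to HTML:
    actorControlService.start(
            new ExportSampleDataActor.Export(
                    FileStorageId.of(crawlStorageId),  // source crawl data
                    1000,                              // sample size
                    "text/html",                       // ctFilter: content-type filter
                    "html-sample"));                   // name of the export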
@@ -0,0 +1,150 @@
+package nu.marginalia.actor.task;
+
+import com.google.gson.Gson;
+import jakarta.inject.Inject;
+import jakarta.inject.Singleton;
+import nu.marginalia.actor.prototype.RecordActorPrototype;
+import nu.marginalia.actor.state.ActorStep;
+import nu.marginalia.io.CrawlerOutputFile;
+import nu.marginalia.process.log.WorkLog;
+import nu.marginalia.process.log.WorkLogEntry;
+import nu.marginalia.service.control.ServiceHeartbeat;
+import nu.marginalia.slop.SlopCrawlDataRecord;
+import nu.marginalia.storage.FileStorageService;
+import nu.marginalia.storage.model.FileStorage;
+import nu.marginalia.storage.model.FileStorageId;
+import org.apache.logging.log4j.util.Strings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Function;
+
+@Singleton
+public class MigrateCrawlDataActor extends RecordActorPrototype {
+
+    private final FileStorageService fileStorageService;
+    private final ServiceHeartbeat serviceHeartbeat;
+    private static final Logger logger = LoggerFactory.getLogger(MigrateCrawlDataActor.class);
+
+    @Inject
+    public MigrateCrawlDataActor(Gson gson, FileStorageService fileStorageService, ServiceHeartbeat serviceHeartbeat) {
+        super(gson);
+
+        this.fileStorageService = fileStorageService;
+        this.serviceHeartbeat = serviceHeartbeat;
+    }
+
+    public record Run(long fileStorageId) implements ActorStep {}
+
+    @Override
+    public ActorStep transition(ActorStep self) throws Exception {
+        return switch (self) {
+            case Run(long fileStorageId) -> {
+
+                FileStorage storage = fileStorageService.getStorage(FileStorageId.of(fileStorageId));
+                Path root = storage.asPath();
+
+                Path crawlerLog = root.resolve("crawler.log");
+                Path newCrawlerLog = Files.createTempFile(root, "crawler", ".migrate.log");
+
+                int totalEntries = WorkLog.countEntries(crawlerLog);
+
+                try (WorkLog workLog = new WorkLog(newCrawlerLog);
+                     var heartbeat = serviceHeartbeat.createServiceAdHocTaskHeartbeat("Migrating")
+                ) {
+                    int entryIdx = 0;
+
+                    for (Map.Entry<WorkLogEntry, Path> item : WorkLog.iterableMap(crawlerLog, new CrawlDataLocator(root))) {
+
+                        final WorkLogEntry entry = item.getKey();
+                        final Path inputPath = item.getValue();
+
+                        Path outputPath = inputPath;
+                        heartbeat.progress("Migrating " + inputPath.getFileName(), entryIdx++, totalEntries);
+
+                        if (inputPath.toString().endsWith(".parquet")) {
+                            String domain = entry.id();
+                            String id = Integer.toHexString(domain.hashCode());
+
+                            outputPath = CrawlerOutputFile.createSlopPath(root, id, domain);
+
+                            if (Files.exists(inputPath)) {
+                                try {
+                                    SlopCrawlDataRecord.convertFromParquet(inputPath, outputPath);
+                                    Files.deleteIfExists(inputPath);
+                                } catch (Exception ex) {
+                                    outputPath = inputPath; // don't update the work log on error
+                                    logger.error("Failed to convert " + inputPath, ex);
+                                }
+                            }
+                            else if (!Files.exists(inputPath) && !Files.exists(outputPath)) {
+                                // if the input file is missing, and the output file is missing, we just write the log
+                                // record identical to the old one
+                                outputPath = inputPath;
+                            }
+                        }
+
+                        // Write a log entry for the (possibly) converted file
+                        workLog.setJobToFinished(entry.id(), outputPath.toString(), entry.cnt());
+                    }
+                }
+
+                Path oldCrawlerLog = Files.createTempFile(root, "crawler-", ".migrate.old.log");
+                Files.move(crawlerLog, oldCrawlerLog, StandardCopyOption.REPLACE_EXISTING);
+                Files.move(newCrawlerLog, crawlerLog);
+
+                yield new End();
+            }
+            default -> new Error();
+        };
+    }
+
+    private static class CrawlDataLocator implements Function<WorkLogEntry, Optional<Map.Entry<WorkLogEntry, Path>>> {
+
+        private final Path crawlRootDir;
+
+        CrawlDataLocator(Path crawlRootDir) {
+            this.crawlRootDir = crawlRootDir;
+        }
+
+        @Override
+        public Optional<Map.Entry<WorkLogEntry, Path>> apply(WorkLogEntry entry) {
+            var path = getCrawledFilePath(crawlRootDir, entry.path());
+
+            if (!Files.exists(path)) {
+                return Optional.empty();
+            }
+
+            try {
+                return Optional.of(Map.entry(entry, path));
+            }
+            catch (Exception ex) {
+                return Optional.empty();
+            }
+        }
+
+        private Path getCrawledFilePath(Path crawlDir, String fileName) {
+            int sp = fileName.lastIndexOf('/');
+
+            // Normalize the filename
+            if (sp >= 0 && sp + 1 < fileName.length())
+                fileName = fileName.substring(sp + 1);
+            if (fileName.length() < 4)
+                fileName = Strings.repeat("0", 4 - fileName.length()) + fileName;
+
+            String sp1 = fileName.substring(0, 2);
+            String sp2 = fileName.substring(2, 4);
+            return crawlDir.resolve(sp1).resolve(sp2).resolve(fileName);
+        }
+    }
+
+    @Override
+    public String describe() {
+        return "Migrates crawl data to the latest format";
+    }
+}
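For orientation, CrawlDataLocator.getCrawledFilePath implements the crawler's two-level directory fan-out: the basename is zero-padded to at least four characters, and its first two character pairs name nested subdirectories. A worked example (paths and file names hypothetical):

    Path root = Path.of("/storage/crawl-data");

    // "ab12cd.parquet" -> /storage/crawl-data/ab/12/ab12cd.parquet
    // "1f" -> padded to "001f" -> /storage/crawl-data/00/1f/001f
    String fileName = "ab12cd.parquet";
    Path resolved = root.resolve(fileName.substring(0, 2))   // "ab"
                        .resolve(fileName.substring(2, 4))   // "12"
                        .resolve(fileName);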
@@ -49,6 +49,7 @@ public class ExecutorExportGrpcService
                         new ExportSampleDataActor.Export(
                                 FileStorageId.of(request.getFileStorageId()),
                                 request.getSize(),
+                                request.getCtFilter(),
                                 request.getName()
                         )
                 );
@@ -6,4 +6,8 @@ public record BrowseResultSet(Collection<BrowseResult> results, String focusDoma
     public BrowseResultSet(Collection<BrowseResult> results) {
         this(results, "");
     }
+
+    public boolean hasFocusDomain() {
+        return focusDomain != null && !focusDomain.isBlank();
+    }
 }
@@ -38,6 +38,7 @@ public class DomainsProtobufCodec {
                 sd.getIndexed(),
                 sd.getActive(),
                 sd.getScreenshot(),
+                sd.getFeed(),
                 SimilarDomain.LinkType.valueOf(sd.getLinkType().name())
         );
     }
@@ -71,6 +71,23 @@ public class DomainInformation {
         return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
     }
 
+    public String getAsnFlag() {
+        if (asnCountry == null || asnCountry.codePointCount(0, asnCountry.length()) != 2) {
+            return "";
+        }
+        String country = asnCountry;
+
+        if ("UK".equals(country)) {
+            country = "GB";
+        }
+
+        int offset = 0x1F1E6;
+        int asciiOffset = 0x41;
+        int firstChar = Character.codePointAt(country, 0) - asciiOffset + offset;
+        int secondChar = Character.codePointAt(country, 1) - asciiOffset + offset;
+        return new String(Character.toChars(firstChar)) + new String(Character.toChars(secondChar));
+    }
+
     public EdgeDomain getDomain() {
         return this.domain;
     }
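getAsnFlag builds a flag emoji from a two-letter country code by shifting each ASCII letter ('A' = 0x41) into the Unicode regional-indicator block starting at U+1F1E6; the non-ISO "UK" code is first normalized to "GB". Worked example for "SE":

    int offset = 0x1F1E6;    // REGIONAL INDICATOR SYMBOL LETTER A
    int asciiOffset = 0x41;  // 'A'

    // 'S' (0x53) -> 0x1F1F8, 'E' (0x45) -> 0x1F1EA
    String flag = new String(Character.toChars('S' - asciiOffset + offset))
                + new String(Character.toChars('E' - asciiOffset + offset));
    // The two regional indicators render together as the Swedish flag.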
@@ -9,6 +9,7 @@ public record SimilarDomain(EdgeUrl url,
                             boolean indexed,
                             boolean active,
                             boolean screenshot,
+                            boolean feed,
                             LinkType linkType) {
 
     public String getRankSymbols() {
@@ -52,12 +53,12 @@ public record SimilarDomain(EdgeUrl url,
             return NONE;
         }
 
-        public String toString() {
+        public String faIcon() {
             return switch (this) {
-                case FOWARD -> "→";
-                case BACKWARD -> "←";
-                case BIDIRECTIONAL -> "⇆";
-                case NONE -> "-";
+                case FOWARD -> "fa-solid fa-arrow-right";
+                case BACKWARD -> "fa-solid fa-arrow-left";
+                case BIDIRECTIONAL -> "fa-solid fa-arrow-right-arrow-left";
+                case NONE -> "";
             };
         }
 
@@ -101,6 +101,7 @@ message RpcSimilarDomain {
         bool active = 6;
         bool screenshot = 7;
         LINK_TYPE linkType = 8;
+        bool feed = 9;
 
         enum LINK_TYPE {
             BACKWARD = 0;
@@ -9,6 +9,7 @@ import gnu.trove.map.hash.TIntIntHashMap;
 import gnu.trove.set.TIntSet;
 import gnu.trove.set.hash.TIntHashSet;
 import it.unimi.dsi.fastutil.ints.Int2DoubleArrayMap;
+import nu.marginalia.WmsaHome;
 import nu.marginalia.api.domains.RpcSimilarDomain;
 import nu.marginalia.api.domains.model.SimilarDomain;
 import nu.marginalia.api.linkgraph.AggregateLinkGraphClient;
@@ -17,10 +18,14 @@ import org.roaringbitmap.RoaringBitmap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.nio.file.Path;
+import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -32,12 +37,13 @@ public class SimilarDomainsService {
     private final HikariDataSource dataSource;
     private final AggregateLinkGraphClient linkGraphClient;
 
-    private volatile TIntIntHashMap domainIdToIdx = new TIntIntHashMap(100_000);
+    private final TIntIntHashMap domainIdToIdx = new TIntIntHashMap(100_000);
     private volatile int[] domainIdxToId;
 
     public volatile Int2DoubleArrayMap[] relatedDomains;
     public volatile TIntList[] domainNeighbors = null;
     public volatile RoaringBitmap screenshotDomains = null;
+    public volatile RoaringBitmap feedDomains = null;
     public volatile RoaringBitmap activeDomains = null;
     public volatile RoaringBitmap indexedDomains = null;
     public volatile TIntDoubleHashMap domainRanks = null;
@@ -82,6 +88,7 @@ public class SimilarDomainsService {
             domainNames = new String[domainIdToIdx.size()];
             domainNeighbors = new TIntList[domainIdToIdx.size()];
             screenshotDomains = new RoaringBitmap();
+            feedDomains = new RoaringBitmap();
             activeDomains = new RoaringBitmap();
             indexedDomains = new RoaringBitmap();
             relatedDomains = new Int2DoubleArrayMap[domainIdToIdx.size()];
@@ -145,10 +152,12 @@ public class SimilarDomainsService {
                     activeDomains.add(idx);
             }
 
-            updateScreenshotInfo();
-
             logger.info("Loaded {} domains", domainRanks.size());
             isReady = true;
+
+            // We can defer these as they only populate a roaringbitmap, and will degrade gracefully when not complete
+            updateScreenshotInfo();
+            updateFeedInfo();
         }
     }
     catch (SQLException throwables) {
@@ -156,6 +165,42 @@ public class SimilarDomainsService {
         }
     }
 
+    private void updateFeedInfo() {
+        Set<String> feedsDomainNames = new HashSet<>(500_000);
+        Path readerDbPath = WmsaHome.getDataPath().resolve("rss-feeds.db").toAbsolutePath();
+        String dbUrl = "jdbc:sqlite:" + readerDbPath;
+
+        logger.info("Opening feed db at " + dbUrl);
+
+        try (var conn = DriverManager.getConnection(dbUrl);
+             var stmt = conn.createStatement()) {
+            var rs = stmt.executeQuery("""
+                select
+                    json_extract(feed, '$.domain') as domain
+                from feed
+                where json_array_length(feed, '$.items') > 0
+                """);
+            while (rs.next()) {
+                feedsDomainNames.add(rs.getString(1));
+            }
+        }
+        catch (SQLException ex) {
+            logger.error("Failed to read RSS feed items", ex);
+        }
+
+        for (int idx = 0; idx < domainNames.length; idx++) {
+            String name = domainNames[idx];
+            if (name == null) {
+                continue;
+            }
+
+            if (feedsDomainNames.contains(name)) {
+                feedDomains.add(idx);
+            }
+        }
+    }
+
     private void updateScreenshotInfo() {
         try (var connection = dataSource.getConnection()) {
             try (var stmt = connection.createStatement()) {
@@ -254,6 +299,7 @@ public class SimilarDomainsService {
                         .setIndexed(indexedDomains.contains(idx))
                         .setActive(activeDomains.contains(idx))
                         .setScreenshot(screenshotDomains.contains(idx))
+                        .setFeed(feedDomains.contains(idx))
                        .setLinkType(RpcSimilarDomain.LINK_TYPE.valueOf(linkType.name()))
                        .build());
 
@@ -369,6 +415,7 @@ public class SimilarDomainsService {
                         .setIndexed(indexedDomains.contains(idx))
                         .setActive(activeDomains.contains(idx))
                         .setScreenshot(screenshotDomains.contains(idx))
+                        .setFeed(feedDomains.contains(idx))
                        .setLinkType(RpcSimilarDomain.LINK_TYPE.valueOf(linkType.name()))
                        .build());
 
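Note the ordering change in the init path: isReady now flips before the screenshot and feed bitmaps are populated. As the comment says, that is safe because the bitmaps only back boolean decorations; a membership test against a still-filling RoaringBitmap returns false rather than failing. A minimal illustration:

    RoaringBitmap feedDomains = new RoaringBitmap();  // freshly allocated, still empty

    boolean badge = feedDomains.contains(42);         // false: no exception, just no badge yet
    feedDomains.add(42);                              // updateFeedInfo() eventually sets the bit
    badge = feedDomains.contains(42);                 // true once populated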
code/functions/favicon/api/build.gradle (new file, 47 lines)
@@ -0,0 +1,47 @@
+plugins {
+    id 'java'
+
+    id "com.google.protobuf" version "0.9.4"
+    id 'jvm-test-suite'
+}
+
+java {
+    toolchain {
+        languageVersion.set(JavaLanguageVersion.of(rootProject.ext.jvmVersion))
+    }
+}
+
+jar.archiveBaseName = 'favicon-api'
+
+apply from: "$rootProject.projectDir/protobuf.gradle"
+apply from: "$rootProject.projectDir/srcsets.gradle"
+
+dependencies {
+    implementation project(':code:common:model')
+    implementation project(':code:common:config')
+    implementation project(':code:common:service')
+
+    implementation libs.bundles.slf4j
+
+    implementation libs.prometheus
+    implementation libs.notnull
+    implementation libs.guava
+    implementation dependencies.create(libs.guice.get()) {
+        exclude group: 'com.google.guava'
+    }
+    implementation libs.gson
+    implementation libs.bundles.protobuf
+    implementation libs.guava
+    libs.bundles.grpc.get().each {
+        implementation dependencies.create(it) {
+            exclude group: 'com.google.guava'
+        }
+    }
+
+    testImplementation libs.bundles.slf4j.test
+    testImplementation libs.bundles.junit
+    testImplementation libs.mockito
+}
@@ -0,0 +1,39 @@
+package nu.marginalia.api.favicon;
+
+import com.google.inject.Inject;
+import nu.marginalia.service.client.GrpcChannelPoolFactory;
+import nu.marginalia.service.client.GrpcMultiNodeChannelPool;
+import nu.marginalia.service.discovery.property.ServiceKey;
+import nu.marginalia.service.discovery.property.ServicePartition;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Optional;
+
+public class FaviconClient {
+    private static final Logger logger = LoggerFactory.getLogger(FaviconClient.class);
+
+    private final GrpcMultiNodeChannelPool<FaviconAPIGrpc.FaviconAPIBlockingStub> channelPool;
+
+    @Inject
+    public FaviconClient(GrpcChannelPoolFactory factory) {
+        this.channelPool = factory.createMulti(
+                ServiceKey.forGrpcApi(FaviconAPIGrpc.class, ServicePartition.multi()),
+                FaviconAPIGrpc::newBlockingStub);
+    }
+
+    public record FaviconData(byte[] bytes, String contentType) {}
+
+    public Optional<FaviconData> getFavicon(String domain, int node) {
+        RpcFaviconResponse rsp = channelPool.call(FaviconAPIGrpc.FaviconAPIBlockingStub::getFavicon)
+                .forNode(node)
+                .run(RpcFaviconRequest.newBuilder().setDomain(domain).build());
+
+        if (rsp.getData().isEmpty())
+            return Optional.empty();
+
+        return Optional.of(new FaviconData(rsp.getData().toByteArray(), rsp.getContentType()));
+    }
+}
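A minimal caller sketch for the new client; the injector wiring is assumed from the codebase's usual Guice setup, and serveImage is a hypothetical consumer:

    FaviconClient faviconClient = injector.getInstance(FaviconClient.class);

    Optional<FaviconClient.FaviconData> icon =
            faviconClient.getFavicon("www.marginalia.nu", 1);  // domain, node id

    icon.ifPresent(data -> {
        // data.contentType() is e.g. "image/png"; data.bytes() is the raw image
        serveImage(data.bytes(), data.contentType());
    });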
code/functions/favicon/api/src/main/protobuf/favicon.proto (new file, 20 lines)
@@ -0,0 +1,20 @@
+syntax="proto3";
+package marginalia.api.favicon;
+
+option java_package="nu.marginalia.api.favicon";
+option java_multiple_files=true;
+
+service FaviconAPI {
+    /** Fetches information about a domain. */
+    rpc getFavicon(RpcFaviconRequest) returns (RpcFaviconResponse) {}
+}
+
+message RpcFaviconRequest {
+    string domain = 1;
+}
+
+message RpcFaviconResponse {
+    string domain = 1;
+    bytes data = 2;
+    string contentType = 3;
+}
code/functions/favicon/build.gradle (new file, 49 lines)
@@ -0,0 +1,49 @@
+plugins {
+    id 'java'
+
+    id 'application'
+    id 'jvm-test-suite'
+}
+
+java {
+    toolchain {
+        languageVersion.set(JavaLanguageVersion.of(rootProject.ext.jvmVersion))
+    }
+}
+
+apply from: "$rootProject.projectDir/srcsets.gradle"
+
+dependencies {
+    implementation project(':code:common:config')
+    implementation project(':code:common:service')
+    implementation project(':code:common:model')
+    implementation project(':code:common:db')
+    implementation project(':code:functions:favicon:api')
+    implementation project(':code:processes:crawling-process')
+
+    implementation libs.bundles.slf4j
+
+    implementation libs.prometheus
+    implementation libs.guava
+    libs.bundles.grpc.get().each {
+        implementation dependencies.create(it) {
+            exclude group: 'com.google.guava'
+        }
+    }
+
+    implementation libs.notnull
+    implementation libs.guava
+    implementation dependencies.create(libs.guice.get()) {
+        exclude group: 'com.google.guava'
+    }
+    implementation dependencies.create(libs.spark.get()) {
+        exclude group: 'org.eclipse.jetty'
+    }
+
+    testImplementation libs.bundles.slf4j.test
+    testImplementation libs.bundles.junit
+    testImplementation libs.mockito
+}
@@ -0,0 +1,48 @@
+package nu.marginalia.functions.favicon;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.google.protobuf.ByteString;
+import io.grpc.stub.StreamObserver;
+import nu.marginalia.api.favicon.FaviconAPIGrpc;
+import nu.marginalia.api.favicon.RpcFaviconRequest;
+import nu.marginalia.api.favicon.RpcFaviconResponse;
+import nu.marginalia.crawl.DomainStateDb;
+import nu.marginalia.service.server.DiscoverableService;
+
+import java.util.Optional;
+
+@Singleton
+public class FaviconGrpcService extends FaviconAPIGrpc.FaviconAPIImplBase implements DiscoverableService {
+    private final DomainStateDb domainStateDb;
+
+    @Inject
+    public FaviconGrpcService(DomainStateDb domainStateDb) {
+        this.domainStateDb = domainStateDb;
+    }
+
+    public boolean shouldRegisterService() {
+        return domainStateDb.isAvailable();
+    }
+
+    @Override
+    public void getFavicon(RpcFaviconRequest request, StreamObserver<RpcFaviconResponse> responseObserver) {
+        Optional<DomainStateDb.FaviconRecord> icon = domainStateDb.getIcon(request.getDomain());
+
+        RpcFaviconResponse response;
+        if (icon.isEmpty()) {
+            response = RpcFaviconResponse.newBuilder().build();
+        }
+        else {
+            var iconRecord = icon.get();
+            response = RpcFaviconResponse.newBuilder()
+                    .setContentType(iconRecord.contentType())
+                    .setDomain(request.getDomain())
+                    .setData(ByteString.copyFrom(iconRecord.imageData()))
+                    .build();
+        }
+
+        responseObserver.onNext(response);
+        responseObserver.onCompleted();
+    }
+}
@@ -5,6 +5,7 @@ import com.google.inject.Singleton;
 import nu.marginalia.api.livecapture.LiveCaptureApiGrpc.LiveCaptureApiBlockingStub;
 import nu.marginalia.service.client.GrpcChannelPoolFactory;
 import nu.marginalia.service.client.GrpcSingleNodeChannelPool;
+import nu.marginalia.service.client.ServiceNotAvailableException;
 import nu.marginalia.service.discovery.property.ServiceKey;
 import nu.marginalia.service.discovery.property.ServicePartition;
 import org.slf4j.Logger;
@@ -29,6 +30,9 @@ public class LiveCaptureClient {
             channelPool.call(LiveCaptureApiBlockingStub::requestScreengrab)
                     .run(RpcDomainId.newBuilder().setDomainId(domainId).build());
         }
+        catch (ServiceNotAvailableException e) {
+            logger.info("requestScreengrab() failed since the service is not available");
+        }
         catch (Exception e) {
             logger.error("API Exception", e);
         }
@@ -24,14 +24,17 @@ dependencies {
     implementation project(':code:libraries:message-queue')
 
     implementation project(':code:execution:api')
+    implementation project(':code:processes:crawling-process:ft-content-type')
 
     implementation libs.jsoup
-    implementation libs.rssreader
+    implementation project(':third-party:rssreader')
     implementation libs.opencsv
+    implementation libs.slop
     implementation libs.sqlite
     implementation libs.bundles.slf4j
     implementation libs.commons.lang3
     implementation libs.commons.io
+    implementation libs.wiremock
 
     implementation libs.prometheus
     implementation libs.guava
@@ -1,6 +1,7 @@
 package nu.marginalia.livecapture;
 
 import com.google.gson.Gson;
+import nu.marginalia.WmsaHome;
 import nu.marginalia.model.gson.GsonFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -12,10 +13,13 @@ import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;
 import java.time.Duration;
 import java.util.Map;
+import java.util.Optional;
 
 /** Client for local browserless.io API */
 public class BrowserlessClient implements AutoCloseable {
 
     private static final Logger logger = LoggerFactory.getLogger(BrowserlessClient.class);
+    private static final String BROWSERLESS_TOKEN = System.getProperty("live-capture.browserless-token", "BROWSERLESS_TOKEN");
 
     private final HttpClient httpClient = HttpClient.newBuilder()
             .version(HttpClient.Version.HTTP_1_1)
@@ -25,18 +29,21 @@ public class BrowserlessClient implements AutoCloseable {
     private final URI browserlessURI;
     private final Gson gson = GsonFactory.get();
 
+    private final String userAgent = WmsaHome.getUserAgent().uaString();
+
     public BrowserlessClient(URI browserlessURI) {
         this.browserlessURI = browserlessURI;
     }
 
-    public String content(String url, GotoOptions gotoOptions) throws IOException, InterruptedException {
+    public Optional<String> content(String url, GotoOptions gotoOptions) throws IOException, InterruptedException {
         Map<String, Object> requestData = Map.of(
                 "url", url,
+                "userAgent", userAgent,
                 "gotoOptions", gotoOptions
         );
 
         var request = HttpRequest.newBuilder()
-                .uri(browserlessURI.resolve("/content"))
+                .uri(browserlessURI.resolve("/content?token="+BROWSERLESS_TOKEN))
                 .method("POST", HttpRequest.BodyPublishers.ofString(
                         gson.toJson(requestData)
                 ))
@@ -47,10 +54,10 @@ public class BrowserlessClient implements AutoCloseable {
 
         if (rsp.statusCode() >= 300) {
             logger.info("Failed to fetch content for {}, status {}", url, rsp.statusCode());
-            return null;
+            return Optional.empty();
         }
 
-        return rsp.body();
+        return Optional.of(rsp.body());
     }
 
     public byte[] screenshot(String url, GotoOptions gotoOptions, ScreenshotOptions screenshotOptions)
@@ -58,12 +65,13 @@ public class BrowserlessClient implements AutoCloseable {
 
         Map<String, Object> requestData = Map.of(
                 "url", url,
+                "userAgent", userAgent,
                 "options", screenshotOptions,
                 "gotoOptions", gotoOptions
         );
 
         var request = HttpRequest.newBuilder()
-                .uri(browserlessURI.resolve("/screenshot"))
+                .uri(browserlessURI.resolve("/screenshot?token="+BROWSERLESS_TOKEN))
                 .method("POST", HttpRequest.BodyPublishers.ofString(
                         gson.toJson(requestData)
                 ))
@@ -82,7 +90,7 @@ public class BrowserlessClient implements AutoCloseable {
     }
 
     @Override
-    public void close() throws Exception {
+    public void close() {
         httpClient.shutdownNow();
     }
 
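Callers of content() now receive an Optional instead of a nullable String, and both endpoints append a token drawn from the live-capture.browserless-token system property (the literal "BROWSERLESS_TOKEN" default is a placeholder that will not authenticate against a token-protected instance). Since close() no longer throws, try-with-resources is tidy. A usage sketch, assuming a local browserless container and an existing GotoOptions value:

    try (var client = new BrowserlessClient(URI.create("http://localhost:3000"))) {
        Optional<String> html = client.content("https://www.marginalia.nu/", gotoOptions);

        html.ifPresentOrElse(
                body -> System.out.println("Fetched " + body.length() + " chars"),
                () -> System.out.println("browserless returned a non-2xx status"));
    }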
@@ -8,6 +8,7 @@ import nu.marginalia.rss.model.FeedDefinition;
 import nu.marginalia.rss.model.FeedItems;
 import nu.marginalia.service.module.ServiceConfiguration;
 import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -127,6 +128,26 @@ public class FeedDb {
         return FeedItems.none();
     }
 
+    @Nullable
+    public String getEtag(EdgeDomain domain) {
+        if (!feedDbEnabled) {
+            throw new IllegalStateException("Feed database is disabled on this node");
+        }
+
+        // Capture the current reader to avoid concurrency issues
+        FeedDbReader reader = this.reader;
+        try {
+            if (reader != null) {
+                return reader.getEtag(domain);
+            }
+        }
+        catch (Exception e) {
+            logger.error("Error getting etag for " + domain, e);
+        }
+        return null;
+    }
+
     public Optional<String> getFeedAsJson(String domain) {
         if (!feedDbEnabled) {
             throw new IllegalStateException("Feed database is disabled on this node");
@@ -214,7 +235,7 @@ public class FeedDb {
 
     public Instant getFetchTime() {
         if (!Files.exists(readerDbPath)) {
-            return Instant.ofEpochMilli(0);
+            return Instant.EPOCH;
         }
 
         try {
@@ -224,7 +245,23 @@ public class FeedDb {
         }
         catch (IOException ex) {
             logger.error("Failed to read the creation time of {}", readerDbPath);
-            return Instant.ofEpochMilli(0);
+            return Instant.EPOCH;
         }
     }
+
+    public boolean hasData() {
+        if (!feedDbEnabled) {
+            throw new IllegalStateException("Feed database is disabled on this node");
+        }
+
+        // Capture the current reader to avoid concurrency issues
+        FeedDbReader reader = this.reader;
+
+        if (reader != null) {
+            return reader.hasData();
+        }
+
+        return false;
+    }
 }
@@ -8,6 +8,7 @@ import nu.marginalia.rss.model.FeedItems;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.annotation.Nullable;
 import java.nio.file.Path;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -32,6 +33,7 @@ public class FeedDbReader implements AutoCloseable {
         try (var stmt = connection.createStatement()) {
             stmt.executeUpdate("CREATE TABLE IF NOT EXISTS feed (domain TEXT PRIMARY KEY, feed JSON)");
             stmt.executeUpdate("CREATE TABLE IF NOT EXISTS errors (domain TEXT PRIMARY KEY, cnt INT DEFAULT 0)");
+            stmt.executeUpdate("CREATE TABLE IF NOT EXISTS etags (domain TEXT PRIMARY KEY, etag TEXT)");
         }
     }
 
@@ -106,6 +108,22 @@ public class FeedDbReader implements AutoCloseable {
         return FeedItems.none();
     }
 
+    @Nullable
+    public String getEtag(EdgeDomain domain) {
+        try (var stmt = connection.prepareStatement("SELECT etag FROM etags WHERE DOMAIN = ?")) {
+            stmt.setString(1, domain.toString());
+            var rs = stmt.executeQuery();
+
+            if (rs.next()) {
+                return rs.getString(1);
+            }
+        } catch (SQLException e) {
+            logger.error("Error getting etag for " + domain, e);
+        }
+
+        return null;
+    }
+
     private FeedItems deserialize(String string) {
         return gson.fromJson(string, FeedItems.class);
     }
@@ -141,4 +159,18 @@ public class FeedDbReader implements AutoCloseable {
     }
 
+    public boolean hasData() {
+        try (var stmt = connection.prepareStatement("SELECT 1 FROM feed LIMIT 1")) {
+            var rs = stmt.executeQuery();
+            if (rs.next()) {
+                return rs.getBoolean(1);
+            }
+            else {
+                return false;
+            }
+        }
+        catch (SQLException ex) {
+            return false;
+        }
+    }
 }
@@ -20,6 +20,7 @@ public class FeedDbWriter implements AutoCloseable {
     private final Connection connection;
     private final PreparedStatement insertFeedStmt;
     private final PreparedStatement insertErrorStmt;
+    private final PreparedStatement insertEtagStmt;
     private final Path dbPath;
 
     private volatile boolean closed = false;
@@ -34,10 +35,12 @@ public class FeedDbWriter implements AutoCloseable {
         try (var stmt = connection.createStatement()) {
             stmt.executeUpdate("CREATE TABLE IF NOT EXISTS feed (domain TEXT PRIMARY KEY, feed JSON)");
             stmt.executeUpdate("CREATE TABLE IF NOT EXISTS errors (domain TEXT PRIMARY KEY, cnt INT DEFAULT 0)");
+            stmt.executeUpdate("CREATE TABLE IF NOT EXISTS etags (domain TEXT PRIMARY KEY, etag TEXT)");
         }
 
         insertFeedStmt = connection.prepareStatement("INSERT INTO feed (domain, feed) VALUES (?, ?)");
         insertErrorStmt = connection.prepareStatement("INSERT INTO errors (domain, cnt) VALUES (?, ?)");
+        insertEtagStmt = connection.prepareStatement("INSERT INTO etags (domain, etag) VALUES (?, ?)");
     }
 
     public Path getDbPath() {
@@ -56,6 +59,20 @@ public class FeedDbWriter implements AutoCloseable {
         }
     }
 
+    public synchronized void saveEtag(String domain, String etag) {
+        if (etag == null || etag.isBlank())
+            return;
+
+        try {
+            insertEtagStmt.setString(1, domain.toLowerCase());
+            insertEtagStmt.setString(2, etag);
+            insertEtagStmt.executeUpdate();
+        }
+        catch (SQLException e) {
+            logger.error("Error saving etag for " + domain, e);
+        }
+    }
+
     public synchronized void setErrorCount(String domain, int count) {
         try {
             insertErrorStmt.setString(1, domain);
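Together with the etags table created above, the writer can persist HTTP validators for the next refresh cycle; the apparent intent (compare the If-Modified-Since handling at the end of this diff) is conditional fetching. A hedged sketch of how a fetch loop might use the pair; the request wiring here is illustrative, not the upstream code:

    String etag = feedDb.getEtag(domain);  // may be null if none stored

    var builder = HttpRequest.newBuilder(feedUri).GET();
    if (etag != null)
        builder.header("If-None-Match", etag);

    HttpResponse<byte[]> rsp = client.send(builder.build(),
            HttpResponse.BodyHandlers.ofByteArray());

    if (rsp.statusCode() == 304) {
        // Not modified: keep the previously stored feed items
    }
    else {
        rsp.headers().firstValue("ETag")
           .ifPresent(tag -> writer.saveEtag(domain.toString(), tag));
    }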
@@ -1,6 +1,6 @@
 package nu.marginalia.rss.model;
 
-import com.apptasticsoftware.rssreader.Item;
+import nu.marginalia.rss.svc.SimpleFeedParser;
 import org.apache.commons.lang3.StringUtils;
 import org.jetbrains.annotations.NotNull;
 import org.jsoup.Jsoup;
@@ -18,37 +18,33 @@ public record FeedItem(String title,
     public static final int MAX_DESC_LENGTH = 255;
     public static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
 
-    public static FeedItem fromItem(Item item, boolean keepFragment) {
-        String title = item.getTitle().orElse("");
+    public static FeedItem fromItem(SimpleFeedParser.ItemData item, boolean keepFragment) {
+        String title = item.title();
         String date = getItemDate(item);
         String description = getItemDescription(item);
         String url;
 
-        if (keepFragment || item.getLink().isEmpty()) {
-            url = item.getLink().orElse("");
+        if (keepFragment) {
+            url = item.url();
         }
         else {
             try {
-                String link = item.getLink().get();
+                String link = item.url();
                 var linkUri = new URI(link);
                 var cleanUri = new URI(linkUri.getScheme(), linkUri.getAuthority(), linkUri.getPath(), linkUri.getQuery(), null);
                 url = cleanUri.toString();
             }
             catch (Exception e) {
                 // fallback to original link if we can't clean it, this is not a very important step
-                url = item.getLink().get();
+                url = item.url();
             }
         }
 
         return new FeedItem(title, date, description, url);
     }
 
-    private static String getItemDescription(Item item) {
-        Optional<String> description = item.getDescription();
-        if (description.isEmpty())
-            return "";
-
-        String rawDescription = description.get();
+    private static String getItemDescription(SimpleFeedParser.ItemData item) {
+        String rawDescription = item.description();
         if (rawDescription.indexOf('<') >= 0) {
             rawDescription = Jsoup.parseBodyFragment(rawDescription).text();
         }
@@ -58,15 +54,18 @@ public record FeedItem(String title,
 
     // e.g. http://fabiensanglard.net/rss.xml does dates like this: 1 Apr 2021 00:00:00 +0000
     private static final DateTimeFormatter extraFormatter = DateTimeFormatter.ofPattern("d MMM yyyy HH:mm:ss Z");
-    private static String getItemDate(Item item) {
+    private static String getItemDate(SimpleFeedParser.ItemData item) {
         Optional<ZonedDateTime> zonedDateTime = Optional.empty();
         try {
             zonedDateTime = item.getPubDateZonedDateTime();
         }
         catch (Exception e) {
-            zonedDateTime = item.getPubDate()
-                    .map(extraFormatter::parse)
-                    .map(ZonedDateTime::from);
+            try {
+                zonedDateTime = Optional.of(ZonedDateTime.from(extraFormatter.parse(item.pubDate())));
+            }
+            catch (Exception e2) {
+                // ignore
+            }
         }
 
         return zonedDateTime.map(date -> date.format(DATE_FORMAT)).orElse("");
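The fallback branch now parses the raw pubDate string directly instead of mapping over an Optional. For the fabiensanglard.net-style date cited in the comment, the pattern resolves like so (note MMM is locale-sensitive, so this assumes an English-month default locale):

    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    DateTimeFormatter extraFormatter =
            DateTimeFormatter.ofPattern("d MMM yyyy HH:mm:ss Z");

    ZonedDateTime zdt = ZonedDateTime.from(
            extraFormatter.parse("1 Apr 2021 00:00:00 +0000"));
    // zdt is 2021-04-01T00:00Z; a malformed date instead throws DateTimeParseException,
    // which the new inner try/catch swallows, leaving the item dateless.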
@@ -1,10 +1,10 @@
 package nu.marginalia.rss.svc;
 
-import com.apptasticsoftware.rssreader.Item;
-import com.apptasticsoftware.rssreader.RssReader;
 import com.google.inject.Inject;
 import com.opencsv.CSVReader;
 import nu.marginalia.WmsaHome;
+import nu.marginalia.contenttype.ContentType;
+import nu.marginalia.contenttype.DocumentBodyToString;
 import nu.marginalia.executor.client.ExecutorClient;
 import nu.marginalia.model.EdgeDomain;
 import nu.marginalia.nodecfg.NodeConfigurationService;
@@ -18,7 +18,6 @@ import nu.marginalia.storage.FileStorageService;
 import nu.marginalia.storage.model.FileStorage;
 import nu.marginalia.storage.model.FileStorageType;
 import nu.marginalia.util.SimpleBlockingThreadPool;
-import org.apache.commons.io.input.BOMInputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,13 +29,11 @@ import java.net.URISyntaxException;
 import java.net.http.HttpClient;
 import java.net.http.HttpRequest;
 import java.net.http.HttpResponse;
-import java.nio.charset.StandardCharsets;
 import java.sql.SQLException;
-import java.time.Duration;
-import java.time.LocalDateTime;
-import java.time.ZonedDateTime;
+import java.time.*;
 import java.time.format.DateTimeFormatter;
 import java.util.*;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -48,8 +45,6 @@ public class FeedFetcherService {
     private static final int MAX_FEED_ITEMS = 10;
     private static final Logger logger = LoggerFactory.getLogger(FeedFetcherService.class);
 
-    private final RssReader rssReader = new RssReader();
-
     private final FeedDb feedDb;
     private final FileStorageService fileStorageService;
     private final NodeConfigurationService nodeConfigurationService;
@@ -59,7 +54,6 @@ public class FeedFetcherService {
     private final DomainLocks domainLocks = new DomainLocks();
 
     private volatile boolean updating;
-    private boolean deterministic = false;
 
     @Inject
     public FeedFetcherService(FeedDb feedDb,
@@ -73,27 +67,11 @@ public class FeedFetcherService {
         this.nodeConfigurationService = nodeConfigurationService;
         this.serviceHeartbeat = serviceHeartbeat;
         this.executorClient = executorClient;
-
-        // Add support for some alternate date tags for atom
-        rssReader.addItemExtension("issued", this::setDateFallback);
-        rssReader.addItemExtension("created", this::setDateFallback);
-    }
-
-    private void setDateFallback(Item item, String value) {
-        if (item.getPubDate().isEmpty()) {
-            item.setPubDate(value);
-        }
     }
 
     public enum UpdateMode {
         CLEAN,
         REFRESH
     };
 
-    /** Disable random-based heuristics. This is meant for testing */
-    public void setDeterministic() {
-        this.deterministic = true;
-    }
-
     public void updateFeeds(UpdateMode updateMode) throws IOException {
@@ -102,6 +80,7 @@ public class FeedFetcherService {
             throw new IllegalStateException("Already updating feeds, refusing to start another update");
         }
 
+
         try (FeedDbWriter writer = feedDb.createWriter();
              HttpClient client = HttpClient.newBuilder()
                      .connectTimeout(Duration.ofSeconds(15))
@@ -109,6 +88,8 @@ public class FeedFetcherService {
                      .followRedirects(HttpClient.Redirect.NORMAL)
                      .version(HttpClient.Version.HTTP_2)
                      .build();
+             ExecutorService fetchExecutor = Executors.newCachedThreadPool();
+             FeedJournal feedJournal = FeedJournal.create();
              var heartbeat = serviceHeartbeat.createServiceAdHocTaskHeartbeat("Update Rss Feeds")
         ) {
             updating = true;
@@ -135,37 +116,39 @@ public class FeedFetcherService {
             for (var feed : definitions) {
                 executor.submitQuietly(() -> {
                     try {
-                        var oldData = feedDb.getFeed(new EdgeDomain(feed.domain()));
+                        EdgeDomain domain = new EdgeDomain(feed.domain());
+                        var oldData = feedDb.getFeed(domain);
 
-                        // If we have existing data, we might skip updating it with a probability that increases with time,
-                        // this is to avoid hammering the feeds that are updated very rarely and save some time and resources
+                        @Nullable
+                        String ifModifiedSinceDate = switch(updateMode) {
|
||||||
// on our end
|
case REFRESH -> getIfModifiedSinceDate(feedDb);
|
||||||
|
case CLEAN -> null;
|
||||||
|
};
|
||||||
|
|
||||||
/* Disable for now:
|
@Nullable
|
||||||
|
String ifNoneMatchTag = switch (updateMode) {
|
||||||
if (!oldData.isEmpty()) {
|
case REFRESH -> feedDb.getEtag(domain);
|
||||||
Duration duration = feed.durationSinceUpdated();
|
case CLEAN -> null;
|
||||||
long daysSinceUpdate = duration.toDays();
|
};
|
||||||
|
|
||||||
|
|
||||||
if (deterministic || (daysSinceUpdate > 2 && ThreadLocalRandom.current()
|
|
||||||
.nextInt(1, 1 + (int) Math.min(10, daysSinceUpdate) / 2) > 1)) {
|
|
||||||
// Skip updating this feed, just write the old data back instead
|
|
||||||
writer.saveFeed(oldData);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
FetchResult feedData;
|
FetchResult feedData;
|
||||||
try (DomainLocks.DomainLock domainLock = domainLocks.lockDomain(new EdgeDomain(feed.domain()))) {
|
try (DomainLocks.DomainLock domainLock = domainLocks.lockDomain(new EdgeDomain(feed.domain()))) {
|
||||||
feedData = fetchFeedData(feed, client);
|
feedData = fetchFeedData(feed, client, fetchExecutor, ifModifiedSinceDate, ifNoneMatchTag);
|
||||||
} catch (Exception ex) {
|
} catch (Exception ex) {
|
||||||
feedData = new FetchResult.TransientError();
|
feedData = new FetchResult.TransientError();
|
||||||
}
|
}
|
||||||
|
|
||||||
switch (feedData) {
|
switch (feedData) {
|
||||||
case FetchResult.Success(String value) -> writer.saveFeed(parseFeed(value, feed));
|
case FetchResult.Success(String value, String etag) -> {
|
||||||
|
writer.saveEtag(feed.domain(), etag);
|
||||||
|
writer.saveFeed(parseFeed(value, feed));
|
||||||
|
|
||||||
|
feedJournal.record(feed.feedUrl(), value);
|
||||||
|
}
|
||||||
|
case FetchResult.NotModified() -> {
|
||||||
|
writer.saveEtag(feed.domain(), ifNoneMatchTag);
|
||||||
|
writer.saveFeed(oldData);
|
||||||
|
}
|
||||||
case FetchResult.TransientError() -> {
|
case FetchResult.TransientError() -> {
|
||||||
int errorCount = errorCounts.getOrDefault(feed.domain().toLowerCase(), 0);
|
int errorCount = errorCounts.getOrDefault(feed.domain().toLowerCase(), 0);
|
||||||
writer.setErrorCount(feed.domain().toLowerCase(), ++errorCount);
|
writer.setErrorCount(feed.domain().toLowerCase(), ++errorCount);
|
||||||
@@ -212,30 +195,83 @@ public class FeedFetcherService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private FetchResult fetchFeedData(FeedDefinition feed, HttpClient client) {
|
@Nullable
|
||||||
|
static String getIfModifiedSinceDate(FeedDb feedDb) {
|
||||||
|
|
||||||
|
// If the db is fresh, we don't send If-Modified-Since
|
||||||
|
if (!feedDb.hasData())
|
||||||
|
return null;
|
||||||
|
|
||||||
|
Instant cutoffInstant = feedDb.getFetchTime();
|
||||||
|
|
||||||
|
// If we're unable to establish fetch time, we don't send If-Modified-Since
|
||||||
|
if (cutoffInstant == Instant.EPOCH)
|
||||||
|
return null;
|
||||||
|
|
||||||
|
return cutoffInstant.atZone(ZoneId.of("GMT")).format(DateTimeFormatter.RFC_1123_DATE_TIME);
|
||||||
|
}
|
||||||
|
|
||||||
|
private FetchResult fetchFeedData(FeedDefinition feed,
|
||||||
|
HttpClient client,
|
||||||
|
ExecutorService executorService,
|
||||||
|
@Nullable String ifModifiedSinceDate,
|
||||||
|
@Nullable String ifNoneMatchTag)
|
||||||
|
{
|
||||||
try {
|
try {
|
||||||
URI uri = new URI(feed.feedUrl());
|
URI uri = new URI(feed.feedUrl());
|
||||||
|
|
||||||
HttpRequest getRequest = HttpRequest.newBuilder()
|
HttpRequest.Builder requestBuilder = HttpRequest.newBuilder()
|
||||||
.GET()
|
.GET()
|
||||||
.uri(uri)
|
.uri(uri)
|
||||||
.header("User-Agent", WmsaHome.getUserAgent().uaIdentifier())
|
.header("User-Agent", WmsaHome.getUserAgent().uaIdentifier())
|
||||||
|
.header("Accept-Encoding", "gzip")
|
||||||
.header("Accept", "text/*, */*;q=0.9")
|
.header("Accept", "text/*, */*;q=0.9")
|
||||||
.timeout(Duration.ofSeconds(15))
|
.timeout(Duration.ofSeconds(15))
|
||||||
.build();
|
;
|
||||||
|
|
||||||
|
// Set the If-Modified-Since or If-None-Match headers if we have them
|
||||||
|
// though since there are certain idiosyncrasies in server implementations,
|
||||||
|
// we avoid setting both at the same time as that may turn a 304 into a 200.
|
||||||
|
if (ifNoneMatchTag != null) {
|
||||||
|
requestBuilder.header("If-None-Match", ifNoneMatchTag);
|
||||||
|
} else if (ifModifiedSinceDate != null) {
|
||||||
|
requestBuilder.header("If-Modified-Since", ifModifiedSinceDate);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
HttpRequest getRequest = requestBuilder.build();
|
||||||
|
|
||||||
for (int i = 0; i < 3; i++) {
|
for (int i = 0; i < 3; i++) {
|
||||||
var rs = client.send(getRequest, HttpResponse.BodyHandlers.ofString());
|
|
||||||
if (429 == rs.statusCode()) {
|
/* Note we need to use an executor to time-limit the send() method in HttpClient, as
|
||||||
|
* its support for timeouts only applies to the time until response starts to be received,
|
||||||
|
* and does not catch the case when the server starts to send data but then hangs.
|
||||||
|
*/
|
||||||
|
HttpResponse<byte[]> rs = executorService.submit(
|
||||||
|
() -> client.send(getRequest, HttpResponse.BodyHandlers.ofByteArray()))
|
||||||
|
.get(15, TimeUnit.SECONDS);
|
||||||
|
|
||||||
|
if (rs.statusCode() == 429) { // Too Many Requests
|
||||||
int retryAfter = Integer.parseInt(rs.headers().firstValue("Retry-After").orElse("2"));
|
int retryAfter = Integer.parseInt(rs.headers().firstValue("Retry-After").orElse("2"));
|
||||||
Thread.sleep(Duration.ofSeconds(Math.clamp(retryAfter, 1, 5)));
|
Thread.sleep(Duration.ofSeconds(Math.clamp(retryAfter, 1, 5)));
|
||||||
} else if (200 == rs.statusCode()) {
|
continue;
|
||||||
return new FetchResult.Success(rs.body());
|
|
||||||
} else if (404 == rs.statusCode()) {
|
|
||||||
return new FetchResult.PermanentError(); // never try again
|
|
||||||
} else {
|
|
||||||
return new FetchResult.TransientError(); // we try again in a few days
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
String newEtagValue = rs.headers().firstValue("ETag").orElse("");
|
||||||
|
|
||||||
|
return switch (rs.statusCode()) {
|
||||||
|
case 200 -> {
|
||||||
|
byte[] responseData = getResponseData(rs);
|
||||||
|
|
||||||
|
String contentType = rs.headers().firstValue("Content-Type").orElse("");
|
||||||
|
String bodyText = DocumentBodyToString.getStringData(ContentType.parse(contentType), responseData);
|
||||||
|
|
||||||
|
yield new FetchResult.Success(bodyText, newEtagValue);
|
||||||
|
}
|
||||||
|
case 304 -> new FetchResult.NotModified(); // via If-Modified-Since semantics
|
||||||
|
case 404 -> new FetchResult.PermanentError(); // never try again
|
||||||
|
default -> new FetchResult.TransientError(); // we try again later
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch (Exception ex) {
|
catch (Exception ex) {
|
||||||
@@ -245,8 +281,22 @@ public class FeedFetcherService {
|
|||||||
return new FetchResult.TransientError();
|
return new FetchResult.TransientError();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private byte[] getResponseData(HttpResponse<byte[]> response) throws IOException {
|
||||||
|
String encoding = response.headers().firstValue("Content-Encoding").orElse("");
|
||||||
|
|
||||||
|
if ("gzip".equals(encoding)) {
|
||||||
|
try (var stream = new GZIPInputStream(new ByteArrayInputStream(response.body()))) {
|
||||||
|
return stream.readAllBytes();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return response.body();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
public sealed interface FetchResult {
|
public sealed interface FetchResult {
|
||||||
record Success(String value) implements FetchResult {}
|
record Success(String value, String etag) implements FetchResult {}
|
||||||
|
record NotModified() implements FetchResult {}
|
||||||
record TransientError() implements FetchResult {}
|
record TransientError() implements FetchResult {}
|
||||||
record PermanentError() implements FetchResult {}
|
record PermanentError() implements FetchResult {}
|
||||||
}
|
}
|
||||||
@@ -316,12 +366,7 @@ public class FeedFetcherService {
|
|||||||
|
|
||||||
public FeedItems parseFeed(String feedData, FeedDefinition definition) {
|
public FeedItems parseFeed(String feedData, FeedDefinition definition) {
|
||||||
try {
|
try {
|
||||||
feedData = sanitizeEntities(feedData);
|
List<SimpleFeedParser.ItemData> rawItems = SimpleFeedParser.parse(feedData);
|
||||||
|
|
||||||
List<Item> rawItems = rssReader.read(
|
|
||||||
// Massage the data to maximize the possibility of the flaky XML parser consuming it
|
|
||||||
new BOMInputStream(new ByteArrayInputStream(feedData.trim().getBytes(StandardCharsets.UTF_8)), false)
|
|
||||||
).toList();
|
|
||||||
|
|
||||||
boolean keepUriFragment = rawItems.size() < 2 || areFragmentsDisparate(rawItems);
|
boolean keepUriFragment = rawItems.size() < 2 || areFragmentsDisparate(rawItems);
|
||||||
|
|
||||||
@@ -344,32 +389,6 @@ public class FeedFetcherService {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static final Map<String, String> HTML_ENTITIES = Map.of(
|
|
||||||
"»", "»",
|
|
||||||
"«", "«",
|
|
||||||
"—", "--",
|
|
||||||
"–", "-",
|
|
||||||
"’", "'",
|
|
||||||
"‘", "'",
|
|
||||||
" ", ""
|
|
||||||
);
|
|
||||||
|
|
||||||
/** The XML parser will blow up if you insert HTML entities in the feed XML,
|
|
||||||
* which is unfortunately relatively common. Replace them as far as is possible
|
|
||||||
* with their corresponding characters
|
|
||||||
*/
|
|
||||||
static String sanitizeEntities(String feedData) {
|
|
||||||
String result = feedData;
|
|
||||||
for (Map.Entry<String, String> entry : HTML_ENTITIES.entrySet()) {
|
|
||||||
result = result.replace(entry.getKey(), entry.getValue());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle lone ampersands not part of a recognized XML entity
|
|
||||||
result = result.replaceAll("&(?!(amp|lt|gt|apos|quot);)", "&");
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Decide whether to keep URI fragments in the feed items.
|
/** Decide whether to keep URI fragments in the feed items.
|
||||||
* <p></p>
|
* <p></p>
|
||||||
* We keep fragments if there are multiple different fragments in the items.
|
* We keep fragments if there are multiple different fragments in the items.
|
||||||
@@ -377,16 +396,16 @@ public class FeedFetcherService {
|
|||||||
* @param items The items to check
|
* @param items The items to check
|
||||||
* @return True if we should keep the fragments, false otherwise
|
* @return True if we should keep the fragments, false otherwise
|
||||||
*/
|
*/
|
||||||
private boolean areFragmentsDisparate(List<Item> items) {
|
private boolean areFragmentsDisparate(List<SimpleFeedParser.ItemData> items) {
|
||||||
Set<String> seenFragments = new HashSet<>();
|
Set<String> seenFragments = new HashSet<>();
|
||||||
|
|
||||||
try {
|
try {
|
||||||
for (var item : items) {
|
for (var item : items) {
|
||||||
if (item.getLink().isEmpty()) {
|
if (item.url().isBlank()) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
var link = item.getLink().get();
|
var link = item.url();
|
||||||
if (!link.contains("#")) {
|
if (!link.contains("#")) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
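Taken together, the FeedFetcherService hunks above swap the old probabilistic refresh-skipping for standard HTTP revalidation. A minimal standalone sketch of the resulting request flow, using only java.net.http; the class and parameter names here are illustrative, not the service's actual API:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

class ConditionalFetchSketch {
    // etag/lastFetchDate are assumed to come from an earlier 200 response,
    // the way FeedDb stores them in the diff above.
    static String fetch(HttpClient client, URI feedUrl, String etag, String lastFetchDate) throws Exception {
        HttpRequest.Builder builder = HttpRequest.newBuilder()
                .GET()
                .uri(feedUrl)
                .timeout(Duration.ofSeconds(15));

        // Mirror the diff's choice of sending only one validator, since some
        // servers answer 200 when both headers are present.
        if (etag != null)
            builder.header("If-None-Match", etag);
        else if (lastFetchDate != null)
            builder.header("If-Modified-Since", lastFetchDate);

        HttpResponse<String> rs = client.send(builder.build(), HttpResponse.BodyHandlers.ofString());

        return switch (rs.statusCode()) {
            case 200 -> rs.body(); // changed: re-parse and store the new ETag
            case 304 -> null;      // unchanged: keep the cached feed
            default -> throw new IllegalStateException("HTTP " + rs.statusCode());
        };
    }
}
```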
@@ -0,0 +1,76 @@
+package nu.marginalia.rss.svc;
+
+import nu.marginalia.WmsaHome;
+import nu.marginalia.slop.SlopTable;
+import nu.marginalia.slop.column.string.StringColumn;
+import nu.marginalia.slop.desc.StorageType;
+import org.apache.commons.io.FileUtils;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.function.BiConsumer;
+
+/** Utility for recording fetched feeds to a journal, useful in debugging feed parser issues.
+ */
+public interface FeedJournal extends AutoCloseable {
+    StringColumn urlColumn = new StringColumn("url");
+    StringColumn contentsColumn = new StringColumn("contents", StandardCharsets.UTF_8, StorageType.ZSTD);
+
+    void record(String url, String contents) throws IOException;
+    void close() throws IOException;
+
+
+    static FeedJournal create() throws IOException {
+        if (Boolean.getBoolean("feedFetcher.persistJournal")) {
+            Path journalPath = WmsaHome.getDataPath().resolve("feed-journal");
+            if (Files.isDirectory(journalPath)) {
+                FileUtils.deleteDirectory(journalPath.toFile());
+            }
+            Files.createDirectories(journalPath);
+            return new RecordingFeedJournal(journalPath);
+        }
+        else {
+            return new NoOpFeedJournal();
+        }
+    }
+
+    class NoOpFeedJournal implements FeedJournal {
+        @Override
+        public void record(String url, String contents) {}
+
+        @Override
+        public void close() {}
+    }
+
+    class RecordingFeedJournal extends SlopTable implements FeedJournal {
+
+        private final StringColumn.Writer urlWriter;
+        private final StringColumn.Writer contentsWriter;
+
+        public RecordingFeedJournal(Path path) throws IOException {
+            super(path, SlopTable.getNumPages(path, FeedJournal.urlColumn));
+
+            urlWriter = urlColumn.create(this);
+            contentsWriter = contentsColumn.create(this);
+        }
+
+        public synchronized void record(String url, String contents) throws IOException {
+            urlWriter.put(url);
+            contentsWriter.put(contents);
+        }
+    }
+
+    static void replay(Path journalPath, BiConsumer<String, String> urlAndContent) throws IOException {
+        try (SlopTable table = new SlopTable(journalPath)) {
+            final StringColumn.Reader urlReader = urlColumn.open(table);
+            final StringColumn.Reader contentsReader = contentsColumn.open(table);
+
+            while (urlReader.hasRemaining()) {
+                urlAndContent.accept(urlReader.get(), contentsReader.get());
+            }
+        }
+
+    }
+}
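The journal above is opt-in via the feedFetcher.persistJournal system property. A sketch of a debugging session that replays a recorded journal through the new parser; the main class here is hypothetical:

```java
import java.nio.file.Path;

import nu.marginalia.WmsaHome;
import nu.marginalia.rss.svc.FeedJournal;
import nu.marginalia.rss.svc.SimpleFeedParser;

// Hypothetical debugging entry point; assumes a prior fetcher run with
// -DfeedFetcher.persistJournal=true left a journal under the data path.
class ReplayFeedJournal {
    public static void main(String[] args) throws Exception {
        Path journal = WmsaHome.getDataPath().resolve("feed-journal");

        FeedJournal.replay(journal, (url, contents) ->
                System.out.println(url + " -> " + SimpleFeedParser.parse(contents).size() + " items"));
    }
}
```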
@@ -0,0 +1,102 @@
+package nu.marginalia.rss.svc;
+
+import com.apptasticsoftware.rssreader.DateTimeParser;
+import com.apptasticsoftware.rssreader.util.Default;
+import org.jsoup.Jsoup;
+import org.jsoup.parser.Parser;
+
+import java.time.ZonedDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+public class SimpleFeedParser {
+
+    private static final DateTimeParser dateTimeParser = Default.getDateTimeParser();
+
+    public record ItemData (
+            String title,
+            String description,
+            String url,
+            String pubDate
+    ) {
+        public boolean isWellFormed() {
+            return title != null && !title.isBlank() &&
+                    description != null && !description.isBlank() &&
+                    url != null && !url.isBlank() &&
+                    pubDate != null && !pubDate.isBlank();
+        }
+
+        public Optional<ZonedDateTime> getPubDateZonedDateTime() {
+            try {
+                return Optional.ofNullable(dateTimeParser.parse(pubDate()));
+            }
+            catch (Exception e) {
+                return Optional.empty();
+            }
+        }
+
+    }
+
+    public static List<ItemData> parse(String content) {
+        var doc = Jsoup.parse(content, Parser.xmlParser());
+        List<ItemData> ret = new ArrayList<>();
+
+        doc.select("item, entry").forEach(element -> {
+            String link = "";
+            String title = "";
+            String description = "";
+            String pubDate = "";
+
+            for (String attr : List.of("title", "dc:title")) {
+                if (!title.isBlank())
+                    break;
+                var tag = element.getElementsByTag(attr).first();
+                if (tag != null) {
+                    title = tag.text();
+                }
+            }
+
+            for (String attr : List.of("title", "summary", "content", "description", "dc:description")) {
+                if (!description.isBlank())
+                    break;
+                var tag = element.getElementsByTag(attr).first();
+                if (tag != null) {
+                    description = tag.text();
+                }
+            }
+
+            for (String attr : List.of("pubDate", "published", "updated", "issued", "created", "dc:date")) {
+                if (!pubDate.isBlank())
+                    break;
+                var tag = element.getElementsByTag(attr).first();
+                if (tag != null) {
+                    pubDate = tag.text();
+                }
+            }
+
+            for (String attr : List.of("link", "url")) {
+                if (!link.isBlank())
+                    break;
+                var tag = element.getElementsByTag(attr).first();
+
+                if (tag != null) {
+                    String linkText = tag.text();
+
+                    if (linkText.isBlank()) {
+                        linkText = tag.attr("href");
+                    }
+
+                    link = linkText;
+                }
+
+            }
+
+            ret.add(new ItemData(title, description, link, pubDate));
+        });
+
+        return ret;
+    }
+
+}
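SimpleFeedParser leans on Jsoup's lenient XML parser, which is also why the entity-sanitization pass elsewhere in this deploy could be deleted: it accepts both RSS item and Atom entry markup without demanding well-formed XML. A small usage sketch over made-up feed data:

```java
// Made-up RSS snippet; note the unescaped ampersand that would have tripped
// a strict XML parser.
String rss = """
        <rss><channel><item>
          <title>Fish & Chips</title>
          <description>A post</description>
          <link>https://example.com/post</link>
          <pubDate>Wed, 01 Jan 2025 00:00:00 GMT</pubDate>
        </item></channel></rss>
        """;

for (var item : SimpleFeedParser.parse(rss)) {
    System.out.println(item.title() + " @ " + item.getPubDateZonedDateTime().orElse(null));
}
```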
27	code/functions/live-capture/test-resources/nlnet.atom	Normal file
@@ -0,0 +1,27 @@
+<feed xmlns="http://www.w3.org/2005/Atom" xml:base="https://nlnet.nl">
+    <title type="text">NLnet news</title>
+    <updated>2025-01-01T00:00:00Z</updated>
+    <id>https://nlnet.nl/feed.atom</id>
+    <link rel="self" type="application/atom+xml" href="https://nlnet.nl/feed.atom"/>
+    <entry>
+        <id>https://nlnet.nl/news/2025/20250101-announcing-grantees-June-call.html</id>
+        <author>
+            <name>NLnet</name>
+        </author>
+        <title type="xhtml">
+            <div xmlns="http://www.w3.org/1999/xhtml">50 Free and Open Source Projects Selected for NGI Zero grants</div>
+        </title>
+        <link href="/news/2025/20250101-announcing-grantees-June-call.html"/>
+        <updated>2025-01-01T00:00:00Z</updated>
+        <content type="xhtml">
+            <div xmlns="http://www.w3.org/1999/xhtml">
+                <p class="paralead">Happy 2025 everyone! On this first day of the fresh new year we are happy to announce 50 project teams were selected to receive NGI Zero grants. We are welcoming projects from 18 countries involving people and organisations of various types: individuals, associations, small and medium enterprises, foundations, universities, and informal collectives. The new projects are all across the different layers of the NGI technology stack: from trustworthy open hardware to services &amp; applications which provide autonomy for end-users.</p>
+                <p>The 50 free and open source projects were selected across two funds. 19 teams will receive grants from the <a href="/commonsfund/">NGI Zero Commons Fund</a>, a broadly themed fund that supports people working on reclaiming the public nature of the internet. The other 31 projects will work within <a href="/core/">NGI Zero Core</a> which focuses on strengthening the open internet architecture. Both funds offer financial and practical support. The latter consisting of <a href="/NGI0/services/">support services</a> such as accessibility and security audits, advice on license compliance, help with testing, documentation or UX design.</p>
+                <h2>If you applied for a grant</h2>
+                <p>This is the selection for the <a href="https://nlnet.nl/news/2024/20240401-call.html">June call</a>. We always inform <em>all</em> applicants about the outcome of the review ahead of the public announcement, if the are selected or not. If you have not heard anything, you probably applied to a later call that is still under review. You can see which call you applied to by checking the application number assigned to the project when you applied. The second number in the sequence refers to the month of the call, so 06 in the case of the June call. (It should not happen, but if you did apply to the June call and did not hear anything, do contact us.)</p>
+                <h2>Meet the new projects!</h2>
+            </div>
+        </content>
+    </entry>
+
+</feed>
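The fixture exercises the Atom-specific paths in SimpleFeedParser: an xhtml title wrapped in a div, and a link carried in an href attribute rather than element text. A hypothetical companion test; the resource path is an assumption based on the file's location in the tree:

```java
import java.nio.file.Files;
import java.nio.file.Path;

import nu.marginalia.rss.svc.SimpleFeedParser;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

// Hypothetical test; not part of the diff above.
class NlnetAtomFixtureTest {
    @Test
    void parsesAtomEntry() throws Exception {
        String atom = Files.readString(Path.of("test-resources/nlnet.atom"));
        var items = SimpleFeedParser.parse(atom);

        Assertions.assertEquals(1, items.size());
        // Atom links carry the URL in the href attribute rather than element text
        Assertions.assertEquals("/news/2025/20250101-announcing-grantees-June-call.html",
                items.get(0).url());
    }
}
```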
@@ -1,36 +1,97 @@
 package nu.marginalia.livecapture;
 
+import com.github.tomakehurst.wiremock.WireMockServer;
+import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
+import nu.marginalia.WmsaHome;
+import nu.marginalia.service.module.ServiceConfigurationModule;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.junit.jupiter.Testcontainers;
 import org.testcontainers.utility.DockerImageName;
 
+import java.io.IOException;
 import java.net.URI;
+import java.util.Map;
+
+import static com.github.tomakehurst.wiremock.client.WireMock.*;
+
 
 @Testcontainers
+@Tag("slow")
 public class BrowserlessClientTest {
-    static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome")).withExposedPorts(3000);
+    static GenericContainer<?> container = new GenericContainer<>(DockerImageName.parse("browserless/chrome"))
+            .withEnv(Map.of("TOKEN", "BROWSERLESS_TOKEN"))
+            .withNetworkMode("bridge")
+            .withExposedPorts(3000);
+
+    static WireMockServer wireMockServer =
+            new WireMockServer(WireMockConfiguration.wireMockConfig()
+                    .port(18089));
+
+    static String localIp;
+
+    static URI browserlessURI;
+
    @BeforeAll
-    public static void setup() {
+    public static void setup() throws IOException {
        container.start();
+
+        browserlessURI = URI.create(String.format("http://%s:%d/",
+                container.getHost(),
+                container.getMappedPort(3000))
+        );
+
+        wireMockServer.start();
+        wireMockServer.stubFor(get("/").willReturn(aResponse().withStatus(200).withBody("Ok")));
+
+        localIp = ServiceConfigurationModule.getLocalNetworkIP();
+    }
+
+    @Tag("flaky")
+    @Test
+    public void testInspectContentUA__Flaky() throws Exception {
+        try (var client = new BrowserlessClient(browserlessURI)) {
+            client.content("http://" + localIp + ":18089/",
+                    BrowserlessClient.GotoOptions.defaultValues()
+            );
+        }
+
+        wireMockServer.verify(getRequestedFor(urlEqualTo("/")).withHeader("User-Agent", equalTo(WmsaHome.getUserAgent().uaString())));
+    }
+
+    @Tag("flaky")
+    @Test
+    public void testInspectScreenshotUA__Flaky() throws Exception {
+        try (var client = new BrowserlessClient(browserlessURI)) {
+            client.screenshot("http://" + localIp + ":18089/",
+                    BrowserlessClient.GotoOptions.defaultValues(),
+                    BrowserlessClient.ScreenshotOptions.defaultValues()
+            );
+        }
+
+        wireMockServer.verify(getRequestedFor(urlEqualTo("/")).withHeader("User-Agent", equalTo(WmsaHome.getUserAgent().uaString())));
    }
 
    @Test
    public void testContent() throws Exception {
-        try (var client = new BrowserlessClient(URI.create("http://" + container.getHost() + ":" + container.getMappedPort(3000)))) {
-            var content = client.content("https://www.marginalia.nu/", BrowserlessClient.GotoOptions.defaultValues());
-            Assertions.assertNotNull(content, "Content should not be null");
+        try (var client = new BrowserlessClient(browserlessURI)) {
+            var content = client.content("https://www.marginalia.nu/", BrowserlessClient.GotoOptions.defaultValues()).orElseThrow();
            Assertions.assertFalse(content.isBlank(), "Content should not be empty");
        }
    }
 
    @Test
    public void testScreenshot() throws Exception {
-        try (var client = new BrowserlessClient(URI.create("http://" + container.getHost() + ":" + container.getMappedPort(3000)))) {
-            var screenshot = client.screenshot("https://www.marginalia.nu/", BrowserlessClient.GotoOptions.defaultValues(), BrowserlessClient.ScreenshotOptions.defaultValues());
+        try (var client = new BrowserlessClient(browserlessURI)) {
+            var screenshot = client.screenshot("https://www.marginalia.nu/",
+                    BrowserlessClient.GotoOptions.defaultValues(),
+                    BrowserlessClient.ScreenshotOptions.defaultValues());
+
            Assertions.assertNotNull(screenshot, "Screenshot should not be null");
        }
    }
 }
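One non-obvious detail of the new test setup: the page under test is served by WireMock on the host, while the browser runs inside the container, so the target address must be routable from the container's bridge network, which rules out localhost. The essential wiring, condensed into a sketch:

```java
// Host side: WireMock on 18089, answering the page the browser will visit.
var wm = new WireMockServer(WireMockConfiguration.wireMockConfig().port(18089));
wm.start();
wm.stubFor(get("/").willReturn(aResponse().withStatus(200).withBody("Ok")));

// Container side: "localhost" would resolve to the container itself, so the
// test addresses the host by its LAN IP instead.
String target = "http://" + ServiceConfigurationModule.getLocalNetworkIP() + ":18089/";
```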
@@ -96,7 +96,6 @@ class FeedFetcherServiceTest extends AbstractModule {
            feedDb.switchDb(writer);
        }
 
-        feedFetcherService.setDeterministic();
        feedFetcherService.updateFeeds(FeedFetcherService.UpdateMode.REFRESH);
 
        var result = feedDb.getFeed(new EdgeDomain("www.marginalia.nu"));
@@ -104,6 +103,26 @@ class FeedFetcherServiceTest extends AbstractModule {
        Assertions.assertFalse(result.isEmpty());
    }
 
+    @Tag("flaky")
+    @Test
+    public void testFetchRepeatedly() throws Exception {
+        try (var writer = feedDb.createWriter()) {
+            writer.saveFeed(new FeedItems("www.marginalia.nu", "https://www.marginalia.nu/log/index.xml", "", List.of()));
+            feedDb.switchDb(writer);
+        }
+
+        feedFetcherService.updateFeeds(FeedFetcherService.UpdateMode.REFRESH);
+        Assertions.assertNotNull(feedDb.getEtag(new EdgeDomain("www.marginalia.nu")));
+        feedFetcherService.updateFeeds(FeedFetcherService.UpdateMode.REFRESH);
+        Assertions.assertNotNull(feedDb.getEtag(new EdgeDomain("www.marginalia.nu")));
+        feedFetcherService.updateFeeds(FeedFetcherService.UpdateMode.REFRESH);
+        Assertions.assertNotNull(feedDb.getEtag(new EdgeDomain("www.marginalia.nu")));
+
+        var result = feedDb.getFeed(new EdgeDomain("www.marginalia.nu"));
+        System.out.println(result);
+        Assertions.assertFalse(result.isEmpty());
+    }
+
    @Tag("flaky")
    @Test
    public void test404() throws Exception {
@@ -112,7 +131,6 @@ class FeedFetcherServiceTest extends AbstractModule {
            feedDb.switchDb(writer);
        }
 
-        feedFetcherService.setDeterministic();
        feedFetcherService.updateFeeds(FeedFetcherService.UpdateMode.REFRESH);
 
        // We forget the feed on a 404 error
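What the three passes in testFetchRepeatedly are meant to exercise, assuming www.marginalia.nu honors If-None-Match (that live dependency is why the test is tagged flaky):

```java
// pass 1: no stored ETag -> unconditional GET -> 200, new ETag saved
// pass 2: ETag sent as If-None-Match          -> 304, old feed and ETag re-saved
// pass 3: same as pass 2
//
// hence the same assertion holds after every pass:
Assertions.assertNotNull(feedDb.getEtag(new EdgeDomain("www.marginalia.nu")));
```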
@@ -1,26 +0,0 @@
-package nu.marginalia.rss.svc;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-
-public class TestXmlSanitization {
-
-    @Test
-    public void testPreservedEntities() {
-        Assertions.assertEquals("&amp;", FeedFetcherService.sanitizeEntities("&amp;"));
-        Assertions.assertEquals("&lt;", FeedFetcherService.sanitizeEntities("&lt;"));
-        Assertions.assertEquals("&gt;", FeedFetcherService.sanitizeEntities("&gt;"));
-        Assertions.assertEquals("&quot;", FeedFetcherService.sanitizeEntities("&quot;"));
-        Assertions.assertEquals("&apos;", FeedFetcherService.sanitizeEntities("&apos;"));
-    }
-
-    @Test
-    public void testStrayAmpersand() {
-        Assertions.assertEquals("Bed &amp; Breakfast", FeedFetcherService.sanitizeEntities("Bed & Breakfast"));
-    }
-
-    @Test
-    public void testTranslatedHtmlEntity() {
-        Assertions.assertEquals("Foo -- Bar", FeedFetcherService.sanitizeEntities("Foo &mdash; Bar"));
-    }
-}
@@ -7,4 +7,8 @@ public record DictionaryResponse(String word, List<DictionaryEntry> entries) {
        this.word = word;
        this.entries = entries.stream().toList(); // Make an immutable copy
    }
+
+    public boolean hasEntries() {
+        return !entries.isEmpty();
+    }
 }
@@ -2,9 +2,6 @@ package nu.marginalia.api.searchquery;
 
 import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
 import nu.marginalia.api.searchquery.model.query.SearchQuery;
-import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
-import nu.marginalia.index.query.limit.QueryLimits;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 import nu.marginalia.index.query.limit.SpecificationLimitType;
 
@@ -27,37 +24,19 @@ public class IndexProtobufCodec {
                .build();
    }
 
-    public static QueryLimits convertQueryLimits(RpcQueryLimits queryLimits) {
-        return new QueryLimits(
-                queryLimits.getResultsByDomain(),
-                queryLimits.getResultsTotal(),
-                queryLimits.getTimeoutMs(),
-                queryLimits.getFetchSize()
-        );
-    }
-
-    public static RpcQueryLimits convertQueryLimits(QueryLimits queryLimits) {
-        return RpcQueryLimits.newBuilder()
-                .setResultsByDomain(queryLimits.resultsByDomain())
-                .setResultsTotal(queryLimits.resultsTotal())
-                .setTimeoutMs(queryLimits.timeoutMs())
-                .setFetchSize(queryLimits.fetchSize())
-                .build();
-    }
-
    public static SearchQuery convertRpcQuery(RpcQuery query) {
-        List<SearchPhraseConstraint> phraeConstraints = new ArrayList<>();
+        List<SearchPhraseConstraint> phraseConstraints = new ArrayList<>();
 
        for (int j = 0; j < query.getPhrasesCount(); j++) {
            var coh = query.getPhrases(j);
            if (coh.getType() == RpcPhrases.TYPE.OPTIONAL) {
-                phraeConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
+                phraseConstraints.add(new SearchPhraseConstraint.Optional(List.copyOf(coh.getTermsList())));
            }
            else if (coh.getType() == RpcPhrases.TYPE.MANDATORY) {
-                phraeConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
+                phraseConstraints.add(new SearchPhraseConstraint.Mandatory(List.copyOf(coh.getTermsList())));
            }
            else if (coh.getType() == RpcPhrases.TYPE.FULL) {
-                phraeConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
+                phraseConstraints.add(new SearchPhraseConstraint.Full(List.copyOf(coh.getTermsList())));
            }
            else {
                throw new IllegalArgumentException("Unknown phrase constraint type: " + coh.getType());
@@ -70,7 +49,7 @@ public class IndexProtobufCodec {
                query.getExcludeList(),
                query.getAdviceList(),
                query.getPriorityList(),
-                phraeConstraints
+                phraseConstraints
        );
    }
 
@@ -103,60 +82,4 @@ public class IndexProtobufCodec {
        return subqueryBuilder.build();
    }
 
-    public static ResultRankingParameters convertRankingParameterss(RpcResultRankingParameters params) {
-        if (params == null)
-            return ResultRankingParameters.sensibleDefaults();
-
-        return new ResultRankingParameters(
-                new Bm25Parameters(params.getBm25K(), params.getBm25B()),
-                params.getShortDocumentThreshold(),
-                params.getShortDocumentPenalty(),
-                params.getDomainRankBonus(),
-                params.getQualityPenalty(),
-                params.getShortSentenceThreshold(),
-                params.getShortSentencePenalty(),
-                params.getBm25Weight(),
-                params.getTcfFirstPositionWeight(),
-                params.getTcfVerbatimWeight(),
-                params.getTcfProximityWeight(),
-                ResultRankingParameters.TemporalBias.valueOf(params.getTemporalBias().getBias().name()),
-                params.getTemporalBiasWeight(),
-                params.getExportDebugData()
-        );
-    }
-
-    public static RpcResultRankingParameters convertRankingParameterss(ResultRankingParameters rankingParams,
-                                                                       RpcTemporalBias temporalBias)
-    {
-        if (rankingParams == null) {
-            rankingParams = ResultRankingParameters.sensibleDefaults();
-        }
-
-        var builder = RpcResultRankingParameters.newBuilder()
-                .setBm25B(rankingParams.bm25Params.b())
-                .setBm25K(rankingParams.bm25Params.k())
-                .setShortDocumentThreshold(rankingParams.shortDocumentThreshold)
-                .setShortDocumentPenalty(rankingParams.shortDocumentPenalty)
-                .setDomainRankBonus(rankingParams.domainRankBonus)
-                .setQualityPenalty(rankingParams.qualityPenalty)
-                .setShortSentenceThreshold(rankingParams.shortSentenceThreshold)
-                .setShortSentencePenalty(rankingParams.shortSentencePenalty)
-                .setBm25Weight(rankingParams.bm25Weight)
-                .setTcfFirstPositionWeight(rankingParams.tcfFirstPosition)
-                .setTcfProximityWeight(rankingParams.tcfProximity)
-                .setTcfVerbatimWeight(rankingParams.tcfVerbatim)
-                .setTemporalBiasWeight(rankingParams.temporalBiasWeight)
-                .setExportDebugData(rankingParams.exportDebugData);
-
-        if (temporalBias != null && temporalBias.getBias() != RpcTemporalBias.Bias.NONE) {
-            builder.setTemporalBias(temporalBias);
-        }
-        else {
-            builder.setTemporalBias(RpcTemporalBias.newBuilder()
-                    .setBias(RpcTemporalBias.Bias.valueOf(rankingParams.temporalBias.name())));
-        }
-
-        return builder.build();
-    }
-
 }
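The deletions above are the point of this refactor: QueryLimits and the ranking-parameter converters disappear, and call sites pass the generated protobuf types around directly. Building a limits object now looks like this; the values are illustrative:

```java
RpcQueryLimits limits = RpcQueryLimits.newBuilder()
        .setResultsByDomain(5)
        .setResultsTotal(100)
        .setTimeoutMs(200)
        .setFetchSize(8192)
        .build();
```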
@@ -9,10 +9,9 @@ import nu.marginalia.service.client.GrpcChannelPoolFactory;
 import nu.marginalia.service.client.GrpcSingleNodeChannelPool;
 import nu.marginalia.service.discovery.property.ServiceKey;
 import nu.marginalia.service.discovery.property.ServicePartition;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import javax.annotation.CheckReturnValue;
+import java.time.Duration;
 
 @Singleton
 public class QueryClient {
@@ -24,13 +23,14 @@ public class QueryClient {
 
    private final GrpcSingleNodeChannelPool<QueryApiGrpc.QueryApiBlockingStub> queryApiPool;
 
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
    @Inject
-    public QueryClient(GrpcChannelPoolFactory channelPoolFactory) {
+    public QueryClient(GrpcChannelPoolFactory channelPoolFactory) throws InterruptedException {
        this.queryApiPool = channelPoolFactory.createSingle(
                ServiceKey.forGrpcApi(QueryApiGrpc.class, ServicePartition.any()),
                QueryApiGrpc::newBlockingStub);
+
+        // Hold up initialization until we have a downstream connection
+        this.queryApiPool.awaitChannel(Duration.ofSeconds(5));
    }
 
    @CheckReturnValue
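Blocking in the constructor trades lazy failure for fail-fast startup: a misconfigured or absent query service now surfaces the moment the client is built. A sketch of what a manual caller has to handle; in the services themselves Guice does this wiring:

```java
import nu.marginalia.service.client.GrpcChannelPoolFactory;

// Hypothetical manual wiring, same package as QueryClient assumed.
class QueryClientFactory {
    static QueryClient create(GrpcChannelPoolFactory factory) {
        try {
            return new QueryClient(factory); // blocks up to 5s waiting for a channel
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("query-service not reachable at startup", e);
        }
    }
}
```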
@@ -5,7 +5,7 @@ import nu.marginalia.api.searchquery.model.query.QueryParams;
 import nu.marginalia.api.searchquery.model.query.QueryResponse;
 import nu.marginalia.api.searchquery.model.query.SearchSpecification;
 import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
+import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
 import nu.marginalia.api.searchquery.model.results.SearchResultItem;
 import nu.marginalia.api.searchquery.model.results.SearchResultKeywordScore;
 import nu.marginalia.api.searchquery.model.results.debug.DebugFactor;
@@ -37,7 +37,7 @@ public class QueryProtobufCodec {
        builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
        builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));
 
-        builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
+        builder.setQueryLimits(query.specs.queryLimits);
 
        // Query strategy may be overridden by the query, but if not, use the one from the request
        if (query.specs.queryStrategy != null && query.specs.queryStrategy != QueryStrategy.AUTO)
@@ -45,9 +45,27 @@ public class QueryProtobufCodec {
        else
            builder.setQueryStrategy(request.getQueryStrategy());
 
-        if (query.specs.rankingParams != null) {
-            builder.setParameters(IndexProtobufCodec.convertRankingParameterss(query.specs.rankingParams, request.getTemporalBias()));
+        if (request.getTemporalBias().getBias() != RpcTemporalBias.Bias.NONE) {
+            if (query.specs.rankingParams != null) {
+                builder.setParameters(
+                        RpcResultRankingParameters.newBuilder(query.specs.rankingParams)
+                                .setTemporalBias(request.getTemporalBias())
+                                .build()
+                );
+            } else {
+                builder.setParameters(
+                        RpcResultRankingParameters.newBuilder(PrototypeRankingParameters.sensibleDefaults())
+                                .setTemporalBias(request.getTemporalBias())
+                                .build()
+                );
+            }
+        } else if (query.specs.rankingParams != null) {
+            builder.setParameters(query.specs.rankingParams);
        }
+        // else {
+        //     if we have no ranking params, we don't need to set them, the client check and use the default values
+        //     so we don't need to send this huge object over the wire
+        // }
 
        return builder.build();
    }
@@ -65,18 +83,13 @@ public class QueryProtobufCodec {
        builder.setSize(IndexProtobufCodec.convertSpecLimit(query.specs.size));
        builder.setRank(IndexProtobufCodec.convertSpecLimit(query.specs.rank));
 
-        builder.setQueryLimits(IndexProtobufCodec.convertQueryLimits(query.specs.queryLimits));
+        builder.setQueryLimits(query.specs.queryLimits);
 
        // Query strategy may be overridden by the query, but if not, use the one from the request
        builder.setQueryStrategy(query.specs.queryStrategy.name());
 
        if (query.specs.rankingParams != null) {
-            builder.setParameters(IndexProtobufCodec.convertRankingParameterss(
-                    query.specs.rankingParams,
-                    RpcTemporalBias.newBuilder().setBias(
-                                    RpcTemporalBias.Bias.NONE)
-                            .build())
-            );
+            builder.setParameters(query.specs.rankingParams);
        }
 
        return builder.build();
@@ -95,10 +108,10 @@ public class QueryProtobufCodec {
                IndexProtobufCodec.convertSpecLimit(request.getSize()),
                IndexProtobufCodec.convertSpecLimit(request.getRank()),
                request.getDomainIdsList(),
-                IndexProtobufCodec.convertQueryLimits(request.getQueryLimits()),
+                request.getQueryLimits(),
                request.getSearchSetIdentifier(),
                QueryStrategy.valueOf(request.getQueryStrategy()),
-                ResultRankingParameters.TemporalBias.valueOf(request.getTemporalBias().getBias().name()),
+                RpcTemporalBias.Bias.valueOf(request.getTemporalBias().getBias().name()),
                request.getPagination().getPage()
        );
    }
@@ -294,9 +307,9 @@ public class QueryProtobufCodec {
                IndexProtobufCodec.convertSpecLimit(specs.getYear()),
                IndexProtobufCodec.convertSpecLimit(specs.getSize()),
                IndexProtobufCodec.convertSpecLimit(specs.getRank()),
-                IndexProtobufCodec.convertQueryLimits(specs.getQueryLimits()),
+                specs.getQueryLimits(),
                QueryStrategy.valueOf(specs.getQueryStrategy()),
-                IndexProtobufCodec.convertRankingParameterss(specs.getParameters())
+                specs.hasParameters() ? specs.getParameters() : null
        );
    }
 
@@ -307,7 +320,7 @@ public class QueryProtobufCodec {
                .addAllTacitExcludes(params.tacitExcludes())
                .addAllTacitPriority(params.tacitPriority())
                .setHumanQuery(params.humanQuery())
-                .setQueryLimits(IndexProtobufCodec.convertQueryLimits(params.limits()))
+                .setQueryLimits(params.limits())
                .setQuality(IndexProtobufCodec.convertSpecLimit(params.quality()))
                .setYear(IndexProtobufCodec.convertSpecLimit(params.year()))
                .setSize(IndexProtobufCodec.convertSpecLimit(params.size()))
@@ -319,7 +332,7 @@ public class QueryProtobufCodec {
                        .build())
                .setPagination(RpcQsQueryPagination.newBuilder()
                        .setPage(params.page())
-                        .setPageSize(Math.min(100, params.limits().resultsTotal()))
+                        .setPageSize(Math.min(100, params.limits().getResultsTotal()))
                        .build());
 
        if (params.nearDomain() != null)
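The net effect of the codec changes above is a precedence rule for ranking parameters, summarized below. The RECENT member in the snippet is an assumption drawn from the search UI's recency option:

```java
// request bias != NONE, specs params present -> specs params, bias overridden
// request bias != NONE, specs params absent  -> prototype defaults, bias overridden
// request bias == NONE, specs params present -> specs params passed through
// request bias == NONE, specs params absent  -> field left unset; the receiving side
//                                               falls back to the prototype defaults

// Overriding just the bias on top of the defaults, as the codec does:
var withBias = RpcResultRankingParameters.newBuilder(PrototypeRankingParameters.sensibleDefaults())
        .setTemporalBias(RpcTemporalBias.newBuilder().setBias(RpcTemporalBias.Bias.RECENT))
        .build();
```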
@@ -1,7 +1,7 @@
 package nu.marginalia.api.searchquery.model.query;
 
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
-import nu.marginalia.index.query.limit.QueryLimits;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
+import nu.marginalia.api.searchquery.RpcTemporalBias;
 import nu.marginalia.index.query.limit.QueryStrategy;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 
@@ -21,14 +21,14 @@ public record QueryParams(
        SpecificationLimit size,
        SpecificationLimit rank,
        List<Integer> domainIds,
-        QueryLimits limits,
+        RpcQueryLimits limits,
        String identifier,
        QueryStrategy queryStrategy,
-        ResultRankingParameters.TemporalBias temporalBias,
+        RpcTemporalBias.Bias temporalBias,
        int page
 )
 {
-    public QueryParams(String query, QueryLimits limits, String identifier) {
+    public QueryParams(String query, RpcQueryLimits limits, String identifier) {
        this(query, null,
                List.of(),
                List.of(),
@@ -42,7 +42,7 @@ public record QueryParams(
                limits,
                identifier,
                QueryStrategy.AUTO,
-                ResultRankingParameters.TemporalBias.NONE,
+                RpcTemporalBias.Bias.NONE,
                1 // page
        );
    }
@@ -1,10 +1,11 @@
 package nu.marginalia.api.searchquery.model.query;
 
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
-import nu.marginalia.index.query.limit.QueryLimits;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
 import nu.marginalia.index.query.limit.QueryStrategy;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 
+import javax.annotation.Nullable;
 import java.util.List;
 
 public class SearchSpecification {
@@ -24,11 +25,12 @@ public class SearchSpecification {
    public SpecificationLimit size;
    public SpecificationLimit rank;
 
-    public final QueryLimits queryLimits;
+    public final RpcQueryLimits queryLimits;
 
    public final QueryStrategy queryStrategy;
 
-    public final ResultRankingParameters rankingParams;
+    @Nullable
+    public final RpcResultRankingParameters rankingParams;
 
    public SearchSpecification(SearchQuery query,
                               List<Integer> domains,
@@ -38,9 +40,9 @@ public class SearchSpecification {
                               SpecificationLimit year,
                               SpecificationLimit size,
                               SpecificationLimit rank,
-                               QueryLimits queryLimits,
+                               RpcQueryLimits queryLimits,
                               QueryStrategy queryStrategy,
-                               ResultRankingParameters rankingParams)
+                               @Nullable RpcResultRankingParameters rankingParams)
    {
        this.query = query;
        this.domains = domains;
@@ -91,7 +93,7 @@ public class SearchSpecification {
        return this.rank;
    }
 
-    public QueryLimits getQueryLimits() {
+    public RpcQueryLimits getQueryLimits() {
        return this.queryLimits;
    }
 
@@ -99,7 +101,7 @@ public class SearchSpecification {
        return this.queryStrategy;
    }
 
-    public ResultRankingParameters getRankingParams() {
+    public RpcResultRankingParameters getRankingParams() {
        return this.rankingParams;
    }
 
@@ -120,9 +122,9 @@ public class SearchSpecification {
        private boolean size$set;
        private SpecificationLimit rank$value;
        private boolean rank$set;
-        private QueryLimits queryLimits;
+        private RpcQueryLimits queryLimits;
        private QueryStrategy queryStrategy;
-        private ResultRankingParameters rankingParams;
+        private RpcResultRankingParameters rankingParams;
 
        SearchSpecificationBuilder() {
        }
@@ -171,7 +173,7 @@ public class SearchSpecification {
            return this;
        }
 
-        public SearchSpecificationBuilder queryLimits(QueryLimits queryLimits) {
+        public SearchSpecificationBuilder queryLimits(RpcQueryLimits queryLimits) {
            this.queryLimits = queryLimits;
            return this;
        }
@@ -181,7 +183,7 @@ public class SearchSpecification {
            return this;
        }
 
-        public SearchSpecificationBuilder rankingParams(ResultRankingParameters rankingParams) {
+        public SearchSpecificationBuilder rankingParams(RpcResultRankingParameters rankingParams) {
            this.rankingParams = rankingParams;
            return this;
        }
@@ -1,6 +1,7 @@
 package nu.marginalia.api.searchquery.model.results;
 
 import nu.marginalia.api.searchquery.model.results.debug.ResultRankingDetails;
+import nu.marginalia.model.DocumentFormat;
 import nu.marginalia.model.EdgeUrl;
 import org.jetbrains.annotations.NotNull;
 
@@ -161,4 +162,14 @@ public class DecoratedSearchResultItem implements Comparable<DecoratedSearchResultItem> {
    public String toString() {
        return "DecoratedSearchResultItem(rawIndexResult=" + this.getRawIndexResult() + ", url=" + this.getUrl() + ", title=" + this.getTitle() + ", description=" + this.getDescription() + ", urlQuality=" + this.getUrlQuality() + ", format=" + this.getFormat() + ", features=" + this.getFeatures() + ", pubYear=" + this.getPubYear() + ", dataHash=" + this.getDataHash() + ", wordsTotal=" + this.getWordsTotal() + ", bestPositions=" + this.getBestPositions() + ", rankingScore=" + this.getRankingScore() + ", resultsFromDomain=" + this.getResultsFromDomain() + ", rankingDetails=" + this.getRankingDetails() + ")";
    }
+
+    public String getShortFormat() {
+        try {
+            var df = DocumentFormat.valueOf(format);
+            return df.shortFormat;
+        }
+        catch (IllegalArgumentException e) {
+            return DocumentFormat.UNKNOWN.shortFormat;
+        }
+    }
 }
@@ -0,0 +1,33 @@
+package nu.marginalia.api.searchquery.model.results;
+
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
+import nu.marginalia.api.searchquery.RpcTemporalBias;
+
+public class PrototypeRankingParameters {
+
+    /** These are the default ranking parameters that are used when no parameters are specified. */
+
+    private static final RpcResultRankingParameters _sensibleDefaults = RpcResultRankingParameters.newBuilder()
+            .setBm25B(0.5)
+            .setBm25K(1.2)
+            .setShortDocumentThreshold(2000)
+            .setShortDocumentPenalty(2.)
+            .setDomainRankBonus(1 / 100.)
+            .setQualityPenalty(1 / 15.)
+            .setShortSentenceThreshold(2)
+            .setShortSentencePenalty(5)
+            .setBm25Weight(1.)
+            .setTcfVerbatimWeight(1.)
+            .setTcfProximityWeight(1.)
+            .setTcfFirstPositionWeight(5)
+            .setTemporalBias(RpcTemporalBias.newBuilder().setBias(RpcTemporalBias.Bias.NONE))
+            .setTemporalBiasWeight(5.0)
+            .setExportDebugData(false)
+            .setDisablePenalties(false)
+            .build();
+
+    public static RpcResultRankingParameters sensibleDefaults() {
+        return _sensibleDefaults;
+    }
+
+}
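
The new PrototypeRankingParameters above bakes the former Java-side defaults into a single prebuilt RpcResultRankingParameters protobuf message. Since the type is protobuf-generated, a caller that wants to tweak one knob can copy the shared defaults through the generated builder; a minimal sketch, assuming the standard generated API (RpcTemporalBias.Bias.RECENT mirrors the old TemporalBias enum and is an assumption here):

    // Sketch: start from the shared defaults and override a single field.
    // toBuilder() is part of every generated protobuf message type.
    RpcResultRankingParameters recencyBiased = PrototypeRankingParameters.sensibleDefaults()
            .toBuilder()
            .setTemporalBias(RpcTemporalBias.newBuilder().setBias(RpcTemporalBias.Bias.RECENT))
            .setTemporalBiasWeight(10.0)
            .build();

Because sensibleDefaults() returns one shared immutable instance, copying before mutation is the only safe way to derive variants.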
@@ -1,12 +1,13 @@
 package nu.marginalia.api.searchquery.model.results;
 
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
 import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
 
 import java.util.BitSet;
 
 public class ResultRankingContext {
     private final int docCount;
-    public final ResultRankingParameters params;
+    public final RpcResultRankingParameters params;
 
 
     public final BitSet regularMask;
@@ -21,7 +22,7 @@ public class ResultRankingContext {
     public final CqDataInt priorityCounts;
 
     public ResultRankingContext(int docCount,
-                                ResultRankingParameters params,
+                                RpcResultRankingParameters params,
                                 BitSet ngramsMask,
                                 BitSet regularMask,
                                 CqDataInt fullCounts,
@@ -1,278 +0,0 @@
-package nu.marginalia.api.searchquery.model.results;
-
-import java.util.Objects;
-
-public class ResultRankingParameters {
-
-    /**
-     * Tuning for BM25 when applied to full document matches
-     */
-    public final Bm25Parameters bm25Params;
-
-    /**
-     * Documents below this length are penalized
-     */
-    public int shortDocumentThreshold;
-
-    public double shortDocumentPenalty;
-
-
-    /**
-     * Scaling factor associated with domain rank (unscaled rank value is 0-255; high is good)
-     */
-    public double domainRankBonus;
-
-    /**
-     * Scaling factor associated with document quality (unscaled rank value is 0-15; high is bad)
-     */
-    public double qualityPenalty;
-
-    /**
-     * Average sentence length values below this threshold are penalized, range [0-4), 2 or 3 is probably what you want
-     */
-    public int shortSentenceThreshold;
-
-    /**
-     * Magnitude of penalty for documents with low average sentence length
-     */
-    public double shortSentencePenalty;
-
-    public double bm25Weight;
-    public double tcfFirstPosition;
-    public double tcfVerbatim;
-    public double tcfProximity;
-
-    public TemporalBias temporalBias;
-    public double temporalBiasWeight;
-
-    public boolean exportDebugData;
-
-    public ResultRankingParameters(Bm25Parameters bm25Params, int shortDocumentThreshold, double shortDocumentPenalty, double domainRankBonus, double qualityPenalty, int shortSentenceThreshold, double shortSentencePenalty, double bm25Weight, double tcfFirstPosition, double tcfVerbatim, double tcfProximity, TemporalBias temporalBias, double temporalBiasWeight, boolean exportDebugData) {
-        this.bm25Params = bm25Params;
-        this.shortDocumentThreshold = shortDocumentThreshold;
-        this.shortDocumentPenalty = shortDocumentPenalty;
-        this.domainRankBonus = domainRankBonus;
-        this.qualityPenalty = qualityPenalty;
-        this.shortSentenceThreshold = shortSentenceThreshold;
-        this.shortSentencePenalty = shortSentencePenalty;
-        this.bm25Weight = bm25Weight;
-        this.tcfFirstPosition = tcfFirstPosition;
-        this.tcfVerbatim = tcfVerbatim;
-        this.tcfProximity = tcfProximity;
-        this.temporalBias = temporalBias;
-        this.temporalBiasWeight = temporalBiasWeight;
-        this.exportDebugData = exportDebugData;
-    }
-
-    public static ResultRankingParameters sensibleDefaults() {
-        return builder()
-                .bm25Params(new Bm25Parameters(1.2, 0.5))
-                .shortDocumentThreshold(2000)
-                .shortDocumentPenalty(2.)
-                .domainRankBonus(1 / 100.)
-                .qualityPenalty(1 / 15.)
-                .shortSentenceThreshold(2)
-                .shortSentencePenalty(5)
-                .bm25Weight(1.)
-                .tcfVerbatim(1.)
-                .tcfProximity(1.)
-                .tcfFirstPosition(5)
-                .temporalBias(TemporalBias.NONE)
-                .temporalBiasWeight(5.0)
-                .exportDebugData(false)
-                .build();
-    }
-
-    public static ResultRankingParametersBuilder builder() {
-        return new ResultRankingParametersBuilder();
-    }
-
-    public Bm25Parameters getBm25Params() {
-        return this.bm25Params;
-    }
-
-    public int getShortDocumentThreshold() {
-        return this.shortDocumentThreshold;
-    }
-
-    public double getShortDocumentPenalty() {
-        return this.shortDocumentPenalty;
-    }
-
-    public double getDomainRankBonus() {
-        return this.domainRankBonus;
-    }
-
-    public double getQualityPenalty() {
-        return this.qualityPenalty;
-    }
-
-    public int getShortSentenceThreshold() {
-        return this.shortSentenceThreshold;
-    }
-
-    public double getShortSentencePenalty() {
-        return this.shortSentencePenalty;
-    }
-
-    public double getBm25Weight() {
-        return this.bm25Weight;
-    }
-
-    public double getTcfFirstPosition() {
-        return this.tcfFirstPosition;
-    }
-
-    public double getTcfVerbatim() {
-        return this.tcfVerbatim;
-    }
-
-    public double getTcfProximity() {
-        return this.tcfProximity;
-    }
-
-    public TemporalBias getTemporalBias() {
-        return this.temporalBias;
-    }
-
-    public double getTemporalBiasWeight() {
-        return this.temporalBiasWeight;
-    }
-
-    public boolean isExportDebugData() {
-        return this.exportDebugData;
-    }
-
-    @Override
-    public final boolean equals(Object o) {
-        if (this == o) return true;
-        if (!(o instanceof ResultRankingParameters that)) return false;
-
-        return shortDocumentThreshold == that.shortDocumentThreshold && Double.compare(shortDocumentPenalty, that.shortDocumentPenalty) == 0 && Double.compare(domainRankBonus, that.domainRankBonus) == 0 && Double.compare(qualityPenalty, that.qualityPenalty) == 0 && shortSentenceThreshold == that.shortSentenceThreshold && Double.compare(shortSentencePenalty, that.shortSentencePenalty) == 0 && Double.compare(bm25Weight, that.bm25Weight) == 0 && Double.compare(tcfFirstPosition, that.tcfFirstPosition) == 0 && Double.compare(tcfVerbatim, that.tcfVerbatim) == 0 && Double.compare(tcfProximity, that.tcfProximity) == 0 && Double.compare(temporalBiasWeight, that.temporalBiasWeight) == 0 && exportDebugData == that.exportDebugData && Objects.equals(bm25Params, that.bm25Params) && temporalBias == that.temporalBias;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = Objects.hashCode(bm25Params);
-        result = 31 * result + shortDocumentThreshold;
-        result = 31 * result + Double.hashCode(shortDocumentPenalty);
-        result = 31 * result + Double.hashCode(domainRankBonus);
-        result = 31 * result + Double.hashCode(qualityPenalty);
-        result = 31 * result + shortSentenceThreshold;
-        result = 31 * result + Double.hashCode(shortSentencePenalty);
-        result = 31 * result + Double.hashCode(bm25Weight);
-        result = 31 * result + Double.hashCode(tcfFirstPosition);
-        result = 31 * result + Double.hashCode(tcfVerbatim);
-        result = 31 * result + Double.hashCode(tcfProximity);
-        result = 31 * result + Objects.hashCode(temporalBias);
-        result = 31 * result + Double.hashCode(temporalBiasWeight);
-        result = 31 * result + Boolean.hashCode(exportDebugData);
-        return result;
-    }
-
-    public String toString() {
-        return "ResultRankingParameters(bm25Params=" + this.getBm25Params() + ", shortDocumentThreshold=" + this.getShortDocumentThreshold() + ", shortDocumentPenalty=" + this.getShortDocumentPenalty() + ", domainRankBonus=" + this.getDomainRankBonus() + ", qualityPenalty=" + this.getQualityPenalty() + ", shortSentenceThreshold=" + this.getShortSentenceThreshold() + ", shortSentencePenalty=" + this.getShortSentencePenalty() + ", bm25Weight=" + this.getBm25Weight() + ", tcfFirstPosition=" + this.getTcfFirstPosition() + ", tcfVerbatim=" + this.getTcfVerbatim() + ", tcfProximity=" + this.getTcfProximity() + ", temporalBias=" + this.getTemporalBias() + ", temporalBiasWeight=" + this.getTemporalBiasWeight() + ", exportDebugData=" + this.isExportDebugData() + ")";
-    }
-
-    public enum TemporalBias {
-        RECENT, OLD, NONE
-    }
-
-    public static class ResultRankingParametersBuilder {
-        private Bm25Parameters bm25Params;
-        private int shortDocumentThreshold;
-        private double shortDocumentPenalty;
-        private double domainRankBonus;
-        private double qualityPenalty;
-        private int shortSentenceThreshold;
-        private double shortSentencePenalty;
-        private double bm25Weight;
-        private double tcfFirstPosition;
-        private double tcfVerbatim;
-        private double tcfProximity;
-        private TemporalBias temporalBias;
-        private double temporalBiasWeight;
-        private boolean exportDebugData;
-
-        ResultRankingParametersBuilder() {
-        }
-
-        public ResultRankingParametersBuilder bm25Params(Bm25Parameters bm25Params) {
-            this.bm25Params = bm25Params;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder shortDocumentThreshold(int shortDocumentThreshold) {
-            this.shortDocumentThreshold = shortDocumentThreshold;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder shortDocumentPenalty(double shortDocumentPenalty) {
-            this.shortDocumentPenalty = shortDocumentPenalty;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder domainRankBonus(double domainRankBonus) {
-            this.domainRankBonus = domainRankBonus;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder qualityPenalty(double qualityPenalty) {
-            this.qualityPenalty = qualityPenalty;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder shortSentenceThreshold(int shortSentenceThreshold) {
-            this.shortSentenceThreshold = shortSentenceThreshold;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder shortSentencePenalty(double shortSentencePenalty) {
-            this.shortSentencePenalty = shortSentencePenalty;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder bm25Weight(double bm25Weight) {
-            this.bm25Weight = bm25Weight;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder tcfFirstPosition(double tcfFirstPosition) {
-            this.tcfFirstPosition = tcfFirstPosition;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder tcfVerbatim(double tcfVerbatim) {
-            this.tcfVerbatim = tcfVerbatim;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder tcfProximity(double tcfProximity) {
-            this.tcfProximity = tcfProximity;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder temporalBias(TemporalBias temporalBias) {
-            this.temporalBias = temporalBias;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder temporalBiasWeight(double temporalBiasWeight) {
-            this.temporalBiasWeight = temporalBiasWeight;
-            return this;
-        }
-
-        public ResultRankingParametersBuilder exportDebugData(boolean exportDebugData) {
-            this.exportDebugData = exportDebugData;
-            return this;
-        }
-
-        public ResultRankingParameters build() {
-            return new ResultRankingParameters(this.bm25Params, this.shortDocumentThreshold, this.shortDocumentPenalty, this.domainRankBonus, this.qualityPenalty, this.shortSentenceThreshold, this.shortSentencePenalty, this.bm25Weight, this.tcfFirstPosition, this.tcfVerbatim, this.tcfProximity, this.temporalBias, this.temporalBiasWeight, this.exportDebugData);
-        }
-
-        public String toString() {
-            return "ResultRankingParameters.ResultRankingParametersBuilder(bm25Params=" + this.bm25Params + ", shortDocumentThreshold=" + this.shortDocumentThreshold + ", shortDocumentPenalty=" + this.shortDocumentPenalty + ", domainRankBonus=" + this.domainRankBonus + ", qualityPenalty=" + this.qualityPenalty + ", shortSentenceThreshold=" + this.shortSentenceThreshold + ", shortSentencePenalty=" + this.shortSentencePenalty + ", bm25Weight=" + this.bm25Weight + ", tcfFirstPosition=" + this.tcfFirstPosition + ", tcfVerbatim=" + this.tcfVerbatim + ", tcfProximity=" + this.tcfProximity + ", temporalBias=" + this.temporalBias + ", temporalBiasWeight=" + this.temporalBiasWeight + ", exportDebugData=" + this.exportDebugData + ")";
-        }
-    }
-}
@@ -162,6 +162,7 @@ message RpcResultRankingParameters {
     double temporalBiasWeight = 17;
 
     bool exportDebugData = 18;
+    bool disablePenalties = 19;
 
 }
 
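Adding disablePenalties as field number 19 keeps RpcResultRankingParameters wire-compatible: an older index node simply ignores the unknown field, and an unset bool reads back as false, so the flag is strictly opt-in. A hedged sketch of a client turning it on, assuming the generated builder API:

    // Sketch: protobuf's copy-seeded builder preserves all other fields.
    var noPenalties = RpcResultRankingParameters.newBuilder(PrototypeRankingParameters.sensibleDefaults())
            .setDisablePenalties(true)
            .build();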
@@ -3,8 +3,6 @@ package nu.marginalia.index.client;
 import nu.marginalia.api.searchquery.IndexProtobufCodec;
 import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
 import nu.marginalia.api.searchquery.model.query.SearchQuery;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
-import nu.marginalia.index.query.limit.QueryLimits;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 import org.junit.jupiter.api.Test;
 
@@ -22,18 +20,6 @@ class IndexProtobufCodecTest {
         verifyIsIdentityTransformation(SpecificationLimit.lessThan(1), l -> IndexProtobufCodec.convertSpecLimit(IndexProtobufCodec.convertSpecLimit(l)));
     }
 
-    @Test
-    public void testRankingParameters() {
-        verifyIsIdentityTransformation(ResultRankingParameters.sensibleDefaults(),
-                p -> IndexProtobufCodec.convertRankingParameterss(IndexProtobufCodec.convertRankingParameterss(p, null)));
-    }
-
-    @Test
-    public void testQueryLimits() {
-        verifyIsIdentityTransformation(new QueryLimits(1,2,3,4),
-                l -> IndexProtobufCodec.convertQueryLimits(IndexProtobufCodec.convertQueryLimits(l))
-        );
-    }
     @Test
     public void testSubqery() {
         verifyIsIdentityTransformation(new SearchQuery(
@@ -2,8 +2,9 @@ package nu.marginalia.functions.searchquery;
 
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
 import nu.marginalia.api.searchquery.model.query.*;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
 import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
 import nu.marginalia.functions.searchquery.query_parser.QueryParser;
 import nu.marginalia.functions.searchquery.query_parser.token.QueryToken;
@@ -36,7 +37,7 @@ public class QueryFactory {
 
 
     public ProcessedQuery createQuery(QueryParams params,
-                                      @Nullable ResultRankingParameters rankingParams) {
+                                      @Nullable RpcResultRankingParameters rankingParams) {
         final var query = params.humanQuery();
 
         if (query.length() > 1000) {
@@ -71,6 +72,17 @@ public class QueryFactory {
 
                 String[] parts = StringUtils.split(str, '_');
 
+                // Trim down tokens to match the behavior of the tokenizer used in indexing
+                for (int i = 0; i < parts.length; i++) {
+                    String part = parts[i];
+
+                    if (part.endsWith("'s") && part.length() > 2) {
+                        part = part.substring(0, part.length()-2);
+                    }
+
+                    parts[i] = part;
+                }
+
                 if (parts.length > 1) {
                     // Require that the terms appear in sequence
                     queryBuilder.phraseConstraint(SearchPhraseConstraint.mandatory(parts));
@@ -121,7 +133,9 @@ public class QueryFactory {
         var limits = params.limits();
         // Disable limits on number of results per domain if we're searching with a site:-type term
         if (domain != null) {
-            limits = limits.forSingleDomain();
+            limits = RpcQueryLimits.newBuilder(limits)
+                    .setResultsByDomain(limits.getResultsTotal())
+                    .build();
        }
 
        var expansion = queryExpansion.expandQuery(queryBuilder.searchTermsInclude);
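With the QueryLimits record gone, the single-domain carve-out above is expressed through the protobuf copy builder: RpcQueryLimits.newBuilder(limits) seeds a builder from the incoming message, so only resultsByDomain changes. A short sketch of the same pattern in isolation, assuming the generated RpcQueryLimits API:

    // Sketch: a site:-restricted search should not be capped per domain,
    // so the per-domain limit is lifted to the total result budget.
    RpcQueryLimits base = RpcQueryLimits.newBuilder()
            .setResultsTotal(100)
            .setResultsByDomain(10)
            .setTimeoutMs(250)
            .setFetchSize(8192)
            .build();

    RpcQueryLimits forSingleDomain = RpcQueryLimits.newBuilder(base)
            .setResultsByDomain(base.getResultsTotal())
            .build();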
@@ -9,7 +9,7 @@ import nu.marginalia.api.searchquery.*;
 import nu.marginalia.api.searchquery.model.query.ProcessedQuery;
 import nu.marginalia.api.searchquery.model.query.QueryParams;
 import nu.marginalia.api.searchquery.model.results.DecoratedSearchResultItem;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
+import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
 import nu.marginalia.index.api.IndexClient;
 import nu.marginalia.service.server.DiscoverableService;
 import org.slf4j.Logger;
@@ -55,7 +55,7 @@ public class QueryGRPCService
                 .time(() -> {
 
                     var params = QueryProtobufCodec.convertRequest(request);
-                    var query = queryFactory.createQuery(params, ResultRankingParameters.sensibleDefaults());
+                    var query = queryFactory.createQuery(params, PrototypeRankingParameters.sensibleDefaults());
 
                     var indexRequest = QueryProtobufCodec.convertQuery(request, query);
 
@@ -102,7 +102,7 @@ public class QueryGRPCService
                                              String originalQuery,
                                              QueryParams params,
                                              IndexClient.Pagination pagination,
-                                             ResultRankingParameters rankingParameters) {
+                                             RpcResultRankingParameters rankingParameters) {
 
         var query = queryFactory.createQuery(params, rankingParameters);
         IndexClient.AggregateQueryResponse response = indexClient.executeQueries(QueryProtobufCodec.convertQuery(originalQuery, query), pagination);
@@ -25,6 +25,7 @@ public class QueryExpansion {
             this::joinDashes,
             this::splitWordNum,
             this::joinTerms,
+            this::categoryKeywords,
             this::ngramAll
     );
 
@@ -98,6 +99,24 @@ public class QueryExpansion {
         }
     }
 
+    // Category keyword substitution, e.g. guitar wiki -> guitar generator:wiki
+    public void categoryKeywords(QWordGraph graph) {
+
+        for (var qw : graph) {
+
+            // Ensure we only perform the substitution on the last word in the query
+            if (!graph.getNextOriginal(qw).getFirst().isEnd()) {
+                continue;
+            }
+
+            switch (qw.word()) {
+                case "recipe", "recipes" -> graph.addVariant(qw, "category:food");
+                case "forum" -> graph.addVariant(qw, "generator:forum");
+                case "wiki" -> graph.addVariant(qw, "generator:wiki");
+            }
+        }
+    }
+
     // Turn 'lawn chair' into 'lawnchair'
     public void joinTerms(QWordGraph graph) {
         QWord prev = null;
@@ -115,6 +134,10 @@ public class QueryExpansion {
                 if (scoreCombo > scoreA + scoreB || scoreCombo > 1000) {
                     graph.addVariantForSpan(prev, qw, joinedWord);
                 }
+                else if (StringUtils.isAlpha(prev.word()) && StringUtils.isNumeric(qw.word())) { // join e.g. trs 80 to trs80 and trs-80
+                    graph.addVariantForSpan(prev, qw, prev.word() + qw.word());
+                    graph.addVariantForSpan(prev, qw, prev.word() + "-" + qw.word());
+                }
             }
 
             prev = qw;
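categoryKeywords only rewrites the final word of a query, so word order decides whether the substitution fires: "pie recipe" gains a category:food variant while "recipe pie" does not, which is exactly what the testExpansion9 case added further down asserts. A hedged sketch of the same check for the wiki keyword, reusing the parseAndGetSpecs helper from QueryFactoryTest (the guitar query itself is invented for illustration, and the exact rendering of the expanded variant is assumed to follow the category:food pattern):

    @Test
    public void testWikiCategoryKeyword() {
        // "wiki" is the last word, so the generator:wiki variant should be expanded in
        var specs = parseAndGetSpecs("guitar wiki");
        Assertions.assertTrue(specs.query.compiledQuery.contains(" generator:wiki "));
    }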
@@ -155,16 +155,25 @@ public class QueryParser {
 
         // Remove trailing punctuation
         int lastChar = str.charAt(str.length() - 1);
-        if (":.,!?$'".indexOf(lastChar) >= 0)
-            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 1), lt.displayStr()));
+        if (":.,!?$'".indexOf(lastChar) >= 0) {
+            str = str.substring(0, str.length() - 1);
+            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
+        }
 
         // Remove term elements that aren't indexed by the search engine
-        if (str.endsWith("'s"))
-            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 2), lt.displayStr()));
-        if (str.endsWith("()"))
-            entity.replace(new QueryToken.LiteralTerm(str.substring(0, str.length() - 2), lt.displayStr()));
-        if (str.startsWith("$"))
-            entity.replace(new QueryToken.LiteralTerm(str.substring(1), lt.displayStr()));
+        if (str.endsWith("'s")) {
+            str = str.substring(0, str.length() - 2);
+            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
+        }
+        if (str.endsWith("()")) {
+            str = str.substring(0, str.length() - 2);
+            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
+        }
+
+        while (str.startsWith("$") || str.startsWith("_")) {
+            str = str.substring(1);
+            entity.replace(new QueryToken.LiteralTerm(str, lt.displayStr()));
+        }
 
         if (entity.isBlank()) {
             entity.remove();
@@ -224,9 +233,19 @@ public class QueryParser {
             entity.replace(new QueryToken.RankTerm(limit, str));
         } else if (str.startsWith("qs=")) {
             entity.replace(new QueryToken.QsTerm(str.substring(3)));
-        } else if (str.contains(":")) {
+        } else if (str.startsWith("site:")
+                || str.startsWith("format:")
+                || str.startsWith("file:")
+                || str.startsWith("tld:")
+                || str.startsWith("ip:")
+                || str.startsWith("as:")
+                || str.startsWith("asn:")
+                || str.startsWith("generator:")
+        )
+        {
             entity.replace(new QueryToken.AdviceTerm(str, t.displayStr()));
         }
+
     }
 
     private static SpecificationLimit parseSpecificationLimit(String str) {
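Two behavioral points in the QueryParser hunks above: the cleanup steps now update str in place before building the replacement token, so successive checks compose (a term like bob's, loses the comma and then the 's, where previously each check only saw the original string), and the colon dispatch changed from a blanket str.contains(":") to an explicit prefix whitelist. The whitelist is what lets a term such as std::vector::push_back stay a searchable literal instead of being swallowed as an advice term; a sketch of the observable difference, driving the parser the same way the new QueryParserTest does:

    QueryParser parser = new QueryParser();
    // Previously any ":" made this an advice term and removed it from the
    // searchable terms; with the prefix whitelist it survives as a literal.
    var tokens = parser.parse("std::vector::push_back");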
@@ -1,165 +0,0 @@
-package nu.marginalia.util.language;
-
-import com.google.inject.Inject;
-import nu.marginalia.term_frequency_dict.TermFrequencyDict;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.util.*;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
-public class EnglishDictionary {
-    private final Set<String> englishWords = new HashSet<>();
-    private final TermFrequencyDict tfDict;
-    private final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Inject
-    public EnglishDictionary(TermFrequencyDict tfDict) {
-        this.tfDict = tfDict;
-        try (var resource = Objects.requireNonNull(ClassLoader.getSystemResourceAsStream("dictionary/en-words"),
-                "Could not load word frequency table");
-             var br = new BufferedReader(new InputStreamReader(resource))
-        ) {
-            for (;;) {
-                String s = br.readLine();
-                if (s == null) {
-                    break;
-                }
-                englishWords.add(s.toLowerCase());
-            }
-        }
-        catch (Exception ex) {
-            throw new RuntimeException(ex);
-        }
-    }
-
-    public boolean isWord(String word) {
-        return englishWords.contains(word);
-    }
-
-    private static final Pattern ingPattern = Pattern.compile(".*(\\w)\\1ing$");
-
-    public Collection<String> getWordVariants(String s) {
-        var variants = findWordVariants(s);
-
-        var ret = variants.stream()
-                .filter(var -> tfDict.getTermFreq(var) > 100)
-                .collect(Collectors.toList());
-
-        if (s.equals("recipe") || s.equals("recipes")) {
-            ret.add("category:food");
-        }
-
-        return ret;
-    }
-
-
-    public Collection<String> findWordVariants(String s) {
-        int sl = s.length();
-
-        if (sl < 2) {
-            return Collections.emptyList();
-        }
-        if (s.endsWith("s")) {
-            String a = s.substring(0, sl-1);
-            String b = s + "es";
-            if (isWord(a) && isWord(b)) {
-                return List.of(a, b);
-            }
-            else if (isWord(a)) {
-                return List.of(a);
-            }
-            else if (isWord(b)) {
-                return List.of(b);
-            }
-        }
-        if (s.endsWith("sm")) {
-            String a = s.substring(0, sl-1)+"t";
-            String b = s.substring(0, sl-1)+"ts";
-            if (isWord(a) && isWord(b)) {
-                return List.of(a, b);
-            }
-            else if (isWord(a)) {
-                return List.of(a);
-            }
-            else if (isWord(b)) {
-                return List.of(b);
-            }
-        }
-        if (s.endsWith("st")) {
-            String a = s.substring(0, sl-1)+"m";
-            String b = s + "s";
-            if (isWord(a) && isWord(b)) {
-                return List.of(a, b);
-            }
-            else if (isWord(a)) {
-                return List.of(a);
-            }
-            else if (isWord(b)) {
-                return List.of(b);
-            }
-        }
-        else if (ingPattern.matcher(s).matches() && sl > 4) { // humming, clapping
-            var a = s.substring(0, sl-4);
-            var b = s.substring(0, sl-3) + "ed";
-
-            if (isWord(a) && isWord(b)) {
-                return List.of(a, b);
-            }
-            else if (isWord(a)) {
-                return List.of(a);
-            }
-            else if (isWord(b)) {
-                return List.of(b);
-            }
-        }
-        else {
-            String a = s + "s";
-            String b = ingForm(s);
-            String c = s + "ed";
-
-            if (isWord(a) && isWord(b) && isWord(c)) {
-                return List.of(a, b, c);
-            }
-            else if (isWord(a) && isWord(b)) {
-                return List.of(a, b);
-            }
-            else if (isWord(b) && isWord(c)) {
-                return List.of(b, c);
-            }
-            else if (isWord(a) && isWord(c)) {
-                return List.of(a, c);
-            }
-            else if (isWord(a)) {
-                return List.of(a);
-            }
-            else if (isWord(b)) {
-                return List.of(b);
-            }
-            else if (isWord(c)) {
-                return List.of(c);
-            }
-        }
-
-        return Collections.emptyList();
-    }
-
-    public String ingForm(String s) {
-        if (s.endsWith("t") && !s.endsWith("tt")) {
-            return s + "ting";
-        }
-        if (s.endsWith("n") && !s.endsWith("nn")) {
-            return s + "ning";
-        }
-        if (s.endsWith("m") && !s.endsWith("mm")) {
-            return s + "ming";
-        }
-        if (s.endsWith("r") && !s.endsWith("rr")) {
-            return s + "ring";
-        }
-        return s + "ing";
-    }
-}
@@ -0,0 +1,32 @@
+package nu.marginalia.functions.searchquery.query_parser;
+
+import nu.marginalia.functions.searchquery.query_parser.token.QueryToken;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+class QueryParserTest {
+
+    @Test
+    // https://github.com/MarginaliaSearch/MarginaliaSearch/issues/140
+    void parse__builtin_ffs() {
+        QueryParser parser = new QueryParser();
+        var tokens = parser.parse("__builtin_ffs");
+        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("builtin_ffs", "__builtin_ffs")), tokens);
+    }
+
+    @Test
+    void trailingParens() {
+        QueryParser parser = new QueryParser();
+        var tokens = parser.parse("strcpy()");
+        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("strcpy", "strcpy()")), tokens);
+    }
+
+    @Test
+    void trailingQuote() {
+        QueryParser parser = new QueryParser();
+        var tokens = parser.parse("bob's");
+        Assertions.assertEquals(List.of(new QueryToken.LiteralTerm("bob", "bob's")), tokens);
+    }
+}
@@ -1,17 +1,18 @@
 package nu.marginalia.query.svc;
 
 import nu.marginalia.WmsaHome;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
+import nu.marginalia.api.searchquery.RpcTemporalBias;
 import nu.marginalia.api.searchquery.model.query.QueryParams;
 import nu.marginalia.api.searchquery.model.query.SearchSpecification;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
 import nu.marginalia.functions.searchquery.QueryFactory;
 import nu.marginalia.functions.searchquery.query_parser.QueryExpansion;
-import nu.marginalia.index.query.limit.QueryLimits;
 import nu.marginalia.index.query.limit.QueryStrategy;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 import nu.marginalia.index.query.limit.SpecificationLimitType;
 import nu.marginalia.segmentation.NgramLexicon;
 import nu.marginalia.term_frequency_dict.TermFrequencyDict;
+import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 
@@ -48,10 +49,15 @@ public class QueryFactoryTest {
                 SpecificationLimit.none(),
                 SpecificationLimit.none(),
                 null,
-                new QueryLimits(100, 100, 100, 100),
+                RpcQueryLimits.newBuilder()
+                        .setResultsTotal(100)
+                        .setResultsByDomain(100)
+                        .setTimeoutMs(100)
+                        .setFetchSize(100)
+                        .build(),
                 "NONE",
                 QueryStrategy.AUTO,
-                ResultRankingParameters.TemporalBias.NONE,
+                RpcTemporalBias.Bias.NONE,
                 0), null).specs;
     }
 
@@ -207,6 +213,46 @@ public class QueryFactoryTest {
         System.out.println(subquery);
     }
 
+
+    @Test
+    public void testContractionWordNum() {
+        var subquery = parseAndGetSpecs("glove 80");
+
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" 80 "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove-80 "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove80 "));
+    }
+
+
+    @Test
+    public void testCplusPlus() {
+        var subquery = parseAndGetSpecs("std::vector::push_back vector");
+        System.out.println(subquery);
+    }
+
+    @Test
+    public void testQuotedApostrophe() {
+        var subquery = parseAndGetSpecs("\"bob's cars\"");
+
+        System.out.println(subquery);
+
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" bob "));
+        Assertions.assertFalse(subquery.query.compiledQuery.contains(" bob's "));
+        Assertions.assertEquals("\"bob's cars\"", subquery.humanQuery);
+    }
+
+    @Test
+    public void testExpansion9() {
+        var subquery = parseAndGetSpecs("pie recipe");
+
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" category:food "));
+
+        subquery = parseAndGetSpecs("recipe pie");
+
+        Assertions.assertFalse(subquery.query.compiledQuery.contains(" category:food "));
+    }
+
     @Test
     public void testParsing() {
         var subquery = parseAndGetSpecs("strlen()");
@@ -16,20 +16,19 @@ import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.Comparator;
-import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import static java.lang.Math.clamp;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Consumer;
 
 @Singleton
 public class IndexClient {
     private static final Logger logger = LoggerFactory.getLogger(IndexClient.class);
     private final GrpcMultiNodeChannelPool<IndexApiGrpc.IndexApiBlockingStub> channelPool;
     private final DomainBlacklistImpl blacklist;
-    private static final ExecutorService executor = Executors.newVirtualThreadPerTaskExecutor();
+    private static final ExecutorService executor = Executors.newCachedThreadPool();
 
     @Inject
     public IndexClient(GrpcChannelPoolFactory channelPoolFactory, DomainBlacklistImpl blacklist) {
@@ -51,40 +50,37 @@ public class IndexClient {
 
     /** Execute a query on the index partitions and return the combined results. */
    public AggregateQueryResponse executeQueries(RpcIndexQuery indexRequest, Pagination pagination) {
-        List<CompletableFuture<Iterator<RpcDecoratedResultItem>>> futures =
-                channelPool.call(IndexApiGrpc.IndexApiBlockingStub::query)
-                        .async(executor)
-                        .runEach(indexRequest);
 
         final int requestedMaxResults = indexRequest.getQueryLimits().getResultsTotal();
-        final int resultsUpperBound = requestedMaxResults * channelPool.getNumNodes();
 
-        List<RpcDecoratedResultItem> results = new ArrayList<>(resultsUpperBound);
+        AtomicInteger totalNumResults = new AtomicInteger(0);
 
-        for (var future : futures) {
-            try {
-                future.get().forEachRemaining(results::add);
-            }
-            catch (Exception e) {
-                logger.error("Downstream exception", e);
-            }
-        }
+        List<RpcDecoratedResultItem> results =
+                channelPool.call(IndexApiGrpc.IndexApiBlockingStub::query)
+                        .async(executor)
+                        .runEach(indexRequest)
+                        .stream()
+                        .map(future -> future.thenApply(iterator -> {
+                            List<RpcDecoratedResultItem> ret = new ArrayList<>(requestedMaxResults);
+                            iterator.forEachRemaining(ret::add);
+                            totalNumResults.addAndGet(ret.size());
+                            return ret;
+                        }))
+                        .mapMulti((CompletableFuture<List<RpcDecoratedResultItem>> fut, Consumer<List<RpcDecoratedResultItem>> c) ->{
+                            try {
+                                c.accept(fut.join());
+                            } catch (Exception e) {
+                                logger.error("Error while fetching results", e);
+                            }
+                        })
+                        .flatMap(List::stream)
+                        .filter(item -> !isBlacklisted(item))
+                        .sorted(comparator)
+                        .skip(Math.max(0, (pagination.page - 1) * pagination.pageSize))
+                        .limit(pagination.pageSize)
+                        .toList();
 
-        // Sort the results by ranking score and remove blacklisted domains
-        results.sort(comparator);
-        results.removeIf(this::isBlacklisted);
-
-        int numReceivedResults = results.size();
-
-        // pagination is typically 1-indexed, so we need to adjust the start and end indices
-        int indexStart = (pagination.page - 1) * pagination.pageSize;
-        int indexEnd = (pagination.page) * pagination.pageSize;
-
-        results = results.subList(
-                clamp(indexStart, 0, Math.max(0, results.size() - 1)), // from is inclusive, so subtract 1 from size()
-                clamp(indexEnd, 0, results.size()));
-
-        return new AggregateQueryResponse(results, pagination.page(), numReceivedResults);
+        return new AggregateQueryResponse(results, pagination.page(), totalNumResults.get());
     }
 
     private boolean isBlacklisted(RpcDecoratedResultItem item) {
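The executeQueries rewrite above folds collection, blacklist filtering, sorting, and pagination into one stream pipeline (note the executor also changes from virtual threads to a cached thread pool): totalNumResults is tallied inside thenApply because the count must be taken before skip/limit trims the page, and the explicit subList index clamping disappears since skip/limit cannot go out of bounds. The mapMulti stage is doing try/join flattening; a self-contained sketch of that idiom under the same assumptions (Java 16+ Stream.mapMulti, partition futures that may fail independently):

    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    class MapMultiSketch {
        // Sketch: merge per-partition futures, dropping failed partitions
        // instead of failing the whole query.
        static List<String> merge(List<CompletableFuture<List<String>>> futures) {
            return futures.stream()
                    .<List<String>>mapMulti((fut, sink) -> {
                        try {
                            sink.accept(fut.join()); // forward on success
                        } catch (Exception e) {
                            // a failed partition contributes nothing
                        }
                    })
                    .flatMap(List::stream)
                    .toList();
        }
    }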
@@ -10,12 +10,12 @@ import it.unimi.dsi.fastutil.longs.LongArrayList;
 import nu.marginalia.api.searchquery.IndexApiGrpc;
 import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
 import nu.marginalia.api.searchquery.RpcIndexQuery;
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
 import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
 import nu.marginalia.api.searchquery.model.query.SearchSpecification;
 import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
 import nu.marginalia.array.page.LongQueryBuffer;
 import nu.marginalia.index.index.StatefulIndex;
 import nu.marginalia.index.model.SearchParameters;
@@ -211,7 +211,7 @@ public class IndexGrpcService
     /** This class is responsible for ranking the results and adding the best results to the
      * resultHeap, which depending on the state of the indexLookup threads may or may not block
      */
-    private ResultRankingContext createRankingContext(ResultRankingParameters rankingParams,
+    private ResultRankingContext createRankingContext(RpcResultRankingParameters rankingParams,
                                                       CompiledQuery<String> compiledQuery,
                                                       CompiledQueryLong compiledQueryIds)
     {
@@ -2,12 +2,13 @@ package nu.marginalia.index.model;
 
 import nu.marginalia.api.searchquery.IndexProtobufCodec;
 import nu.marginalia.api.searchquery.RpcIndexQuery;
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQueryParser;
-import nu.marginalia.api.searchquery.model.query.SearchSpecification;
 import nu.marginalia.api.searchquery.model.query.SearchQuery;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
+import nu.marginalia.api.searchquery.model.query.SearchSpecification;
+import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
 import nu.marginalia.index.query.IndexSearchBudget;
 import nu.marginalia.index.query.limit.QueryStrategy;
 import nu.marginalia.index.searchset.SearchSet;
@@ -23,7 +24,7 @@ public class SearchParameters {
     public final IndexSearchBudget budget;
     public final SearchQuery query;
     public final QueryParams queryParams;
-    public final ResultRankingParameters rankingParams;
+    public final RpcResultRankingParameters rankingParams;
 
     public final int limitByDomain;
     public final int limitTotal;
@@ -41,11 +42,11 @@ public class SearchParameters {
     public SearchParameters(SearchSpecification specsSet, SearchSet searchSet) {
         var limits = specsSet.queryLimits;
 
-        this.fetchSize = limits.fetchSize();
-        this.budget = new IndexSearchBudget(limits.timeoutMs());
+        this.fetchSize = limits.getFetchSize();
+        this.budget = new IndexSearchBudget(limits.getTimeoutMs());
         this.query = specsSet.query;
-        this.limitByDomain = limits.resultsByDomain();
-        this.limitTotal = limits.resultsTotal();
+        this.limitByDomain = limits.getResultsByDomain();
+        this.limitTotal = limits.getResultsTotal();
 
         queryParams = new QueryParams(
                 specsSet.quality,
@@ -62,17 +63,17 @@ public class SearchParameters {
     }
 
     public SearchParameters(RpcIndexQuery request, SearchSet searchSet) {
-        var limits = IndexProtobufCodec.convertQueryLimits(request.getQueryLimits());
+        var limits = request.getQueryLimits();
 
-        this.fetchSize = limits.fetchSize();
+        this.fetchSize = limits.getFetchSize();
 
         // The time budget is halved because this is the point when we start to
         // wrap up the search and return the results.
-        this.budget = new IndexSearchBudget(limits.timeoutMs() / 2);
+        this.budget = new IndexSearchBudget(limits.getTimeoutMs() / 2);
         this.query = IndexProtobufCodec.convertRpcQuery(request.getQuery());
 
-        this.limitByDomain = limits.resultsByDomain();
-        this.limitTotal = limits.resultsTotal();
+        this.limitByDomain = limits.getResultsByDomain();
+        this.limitTotal = limits.getResultsTotal();
 
         queryParams = new QueryParams(
                 convertSpecLimit(request.getQuality()),
@@ -85,7 +86,7 @@ public class SearchParameters {
         compiledQuery = CompiledQueryParser.parse(this.query.compiledQuery);
         compiledQueryIds = compiledQuery.mapToLong(SearchTermsUtil::getWordId);
 
-        rankingParams = IndexProtobufCodec.convertRankingParameterss(request.getParameters());
+        rankingParams = request.hasParameters() ? request.getParameters() : PrototypeRankingParameters.sensibleDefaults();
     }
 
 
@@ -2,7 +2,6 @@ package nu.marginalia.index.results;
 
 import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
 import nu.marginalia.api.searchquery.model.compiled.CqExpression;
-import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
 import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
 
 import java.util.BitSet;
@@ -24,14 +23,14 @@ public class Bm25GraphVisitor implements CqExpression.DoubleVisitor {
 
     private final BitSet mask;
 
-    public Bm25GraphVisitor(Bm25Parameters bm25Parameters,
+    public Bm25GraphVisitor(double k1, double b,
                             float[] counts,
                             int length,
                             ResultRankingContext ctx) {
        this.length = length;
 
-        this.k1 = bm25Parameters.k();
-        this.b = bm25Parameters.b();
+        this.k1 = k1;
+        this.b = b;
 
         this.docCount = ctx.termFreqDocCount();
         this.counts = counts;
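Passing k1 and b as bare doubles removes the Bm25Parameters carrier type; the two values are the standard BM25 free parameters, and the defaults above are k1 = 1.2, b = 0.5. For reference, the textbook per-term score these parameters belong to is, with f(t,d) the term frequency, |d| the document length and avgdl the average document length (the visitor's exact weighting may differ in detail):

    \mathrm{score}(t, d) = \mathrm{IDF}(t) \cdot \frac{f(t,d)\,(k_1 + 1)}{f(t,d) + k_1 \left(1 - b + b\,\frac{|d|}{\mathrm{avgdl}}\right)}

Higher b strengthens the document-length normalization; b = 0.5 here is gentler than the commonly cited 0.75.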
@@ -0,0 +1,119 @@
+package nu.marginalia.index.results;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import gnu.trove.map.hash.TIntDoubleHashMap;
+import nu.marginalia.WmsaHome;
+import nu.marginalia.db.DbDomainQueries;
+import nu.marginalia.model.EdgeDomain;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.OptionalInt;
+import java.util.concurrent.TimeUnit;
+
+@Singleton
+public class DomainRankingOverrides {
+    private final DbDomainQueries domainQueries;
+
+    private volatile TIntDoubleHashMap rankingFactors = new TIntDoubleHashMap(100, 0.75f, -1, 1.);
+
+    private static final Logger logger = LoggerFactory.getLogger(DomainRankingOverrides.class);
+
+    private final Path overrideFilePath;
+
+    @Inject
+    public DomainRankingOverrides(DbDomainQueries domainQueries) {
+        this.domainQueries = domainQueries;
+
+        overrideFilePath = WmsaHome.getDataPath().resolve("domain-ranking-factors.txt");
+
+        Thread.ofPlatform().start(this::updateRunner);
+    }
+
+    // for test access
+    public DomainRankingOverrides(DbDomainQueries domainQueries, Path overrideFilePath)
+    {
+        this.domainQueries = domainQueries;
+        this.overrideFilePath = overrideFilePath;
+    }
+
+    public double getRankingFactor(int domainId) {
+        return rankingFactors.get(domainId);
+    }
+
+    private void updateRunner() {
+        for (;;) {
+            reloadFile();
+
+            try {
+                TimeUnit.MINUTES.sleep(5);
+            } catch (InterruptedException ex) {
+                logger.warn("Thread interrupted", ex);
+                break;
+            }
+        }
+    }
+
+    void reloadFile() {
+        if (!Files.exists(overrideFilePath)) {
+            return;
+        }
+
+        try {
+            List<String> lines = Files.readAllLines(overrideFilePath);
+
+            double factor = 1.;
+
+            var newRankingFactors = new TIntDoubleHashMap(lines.size(), 0.75f, -1, 1.);
+
+            for (var line : lines) {
+                if (line.isBlank()) continue;
+                if (line.startsWith("#")) continue;
+
+                String[] parts = line.split("\\s+");
+                if (parts.length != 2) {
+                    logger.warn("Unrecognized format for domain overrides file: {}", line);
+                    continue;
+                }
+
+                try {
+                    switch (parts[0]) {
+                        case "value" -> {
+                            // error handle me
+                            factor = Double.parseDouble(parts[1]);
+                            if (factor < 0) {
+                                logger.error("Negative values are not permitted, found {}", factor);
+                                factor = 1;
+                            }
+                        }
+                        case "domain" -> {
+                            // error handle
+                            OptionalInt domainId = domainQueries.tryGetDomainId(new EdgeDomain(parts[1]));
+                            if (domainId.isPresent()) {
+                                newRankingFactors.put(domainId.getAsInt(), factor);
+                            }
+                            else {
+                                logger.warn("Unrecognized domain id {}", parts[1]);
+                            }
+                        }
+                        default -> {
+                            logger.warn("Unrecognized format {}", line);
+                        }
+                    }
+                } catch (Exception ex) {
+                    logger.warn("Error in parsing domain overrides file: {} ({})", line, ex.getClass().getSimpleName());
+                }
+            }
+
+            rankingFactors = newRankingFactors;
+        } catch (IOException ex) {
+            logger.error("Failed to read " + overrideFilePath, ex);
+        }
+    }
+}
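The reload logic above implies a simple line-oriented format for domain-ranking-factors.txt: blank lines and #-comments are skipped, a "value" line sets the current multiplier, and each subsequent "domain" line assigns that multiplier to the named domain. A hypothetical example file (illustrative only, not taken from the repository):

    # Boost these domains by 50%
    value 1.5
    domain example.com
    domain www.example.net

    # Demote this one
    value 0.5
    domain example.org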
@@ -40,13 +40,16 @@ public class IndexResultRankingService {
     private final DocumentDbReader documentDbReader;
     private final StatefulIndex statefulIndex;
+    private final DomainRankingOverrides domainRankingOverrides;

     @Inject
     public IndexResultRankingService(DocumentDbReader documentDbReader,
-                                     StatefulIndex statefulIndex)
+                                     StatefulIndex statefulIndex,
+                                     DomainRankingOverrides domainRankingOverrides)
     {
         this.documentDbReader = documentDbReader;
         this.statefulIndex = statefulIndex;
+        this.domainRankingOverrides = domainRankingOverrides;
     }

     public List<SearchResultItem> rankResults(SearchParameters params,
@@ -57,7 +60,7 @@ public class IndexResultRankingService {
         if (resultIds.isEmpty())
             return List.of();

-        IndexResultScoreCalculator resultRanker = new IndexResultScoreCalculator(statefulIndex, rankingContext, params);
+        IndexResultScoreCalculator resultRanker = new IndexResultScoreCalculator(statefulIndex, domainRankingOverrides, rankingContext, params);

         List<SearchResultItem> results = new ArrayList<>(resultIds.size());

@@ -156,7 +159,7 @@ public class IndexResultRankingService {
         // for the selected results, as this would be comically expensive to do for all the results we
         // discard along the way

-        if (params.rankingParams.exportDebugData) {
+        if (params.rankingParams.getExportDebugData()) {
             var combinedIdsList = new LongArrayList(resultsList.size());
             for (var item : resultsList) {
                 combinedIdsList.add(item.combinedId);
@@ -2,10 +2,11 @@ package nu.marginalia.index.results;

 import it.unimi.dsi.fastutil.ints.IntIterator;
 import it.unimi.dsi.fastutil.ints.IntList;
+import nu.marginalia.api.searchquery.RpcResultRankingParameters;
+import nu.marginalia.api.searchquery.RpcTemporalBias;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQuery;
 import nu.marginalia.api.searchquery.model.compiled.CompiledQueryLong;
 import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
 import nu.marginalia.api.searchquery.model.results.SearchResultItem;
 import nu.marginalia.api.searchquery.model.results.debug.DebugRankingFactors;
 import nu.marginalia.index.forward.spans.DocumentSpans;
@@ -40,14 +41,17 @@ public class IndexResultScoreCalculator {
     private final CombinedIndexReader index;
     private final QueryParams queryParams;

+    private final DomainRankingOverrides domainRankingOverrides;
     private final ResultRankingContext rankingContext;
     private final CompiledQuery<String> compiledQuery;

     public IndexResultScoreCalculator(StatefulIndex statefulIndex,
+                                      DomainRankingOverrides domainRankingOverrides,
                                       ResultRankingContext rankingContext,
                                       SearchParameters params)
     {
         this.index = statefulIndex.get();
+        this.domainRankingOverrides = domainRankingOverrides;
         this.rankingContext = rankingContext;

         this.queryParams = params.queryParams;
@@ -116,20 +120,20 @@ public class IndexResultScoreCalculator {

         float proximitiyFac = getProximitiyFac(decodedPositions, searchTerms.phraseConstraints, verbatimMatches, unorderedMatches, spans);

-        double score_firstPosition = params.tcfFirstPosition * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
-        double score_verbatim = params.tcfVerbatim * verbatimMatches.getScore();
-        double score_proximity = params.tcfProximity * proximitiyFac;
-        double score_bM25 = params.bm25Weight
-                * wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.bm25Params, unorderedMatches.getWeightedCounts(), docSize, rankingContext))
+        double score_firstPosition = params.getTcfFirstPositionWeight() * (1.0 / Math.sqrt(unorderedMatches.firstPosition));
+        double score_verbatim = params.getTcfVerbatimWeight() * verbatimMatches.getScore();
+        double score_proximity = params.getTcfProximityWeight() * proximitiyFac;
+        double score_bM25 = params.getBm25Weight()
+                * wordFlagsQuery.root.visit(new Bm25GraphVisitor(params.getBm25K(), params.getBm25B(), unorderedMatches.getWeightedCounts(), docSize, rankingContext))
                 / (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));
-        double score_bFlags = params.bm25Weight
-                * wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.bm25Params, wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
+        double score_bFlags = params.getBm25Weight()
+                * wordFlagsQuery.root.visit(new TermFlagsGraphVisitor(params.getBm25K(), wordFlagsQuery.data, unorderedMatches.getWeightedCounts(), rankingContext))
                 / (Math.sqrt(unorderedMatches.searchableKeywordCount + 1));

+        double rankingAdjustment = domainRankingOverrides.getRankingFactor(UrlIdCodec.getDomainId(combinedId));
+
         double score = normalize(
-                score_firstPosition + score_proximity + score_verbatim
-                + score_bM25
-                + score_bFlags,
+                rankingAdjustment * (score_firstPosition + score_proximity + score_verbatim + score_bM25 + score_bFlags),
                 -Math.min(0, documentBonus) // The magnitude of documentBonus, if it is negative; otherwise 0
         );

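Worth noting about the new rankingAdjustment multiplier: the TIntDoubleHashMap in DomainRankingOverrides is constructed with a no-entry value of 1.0, so getRankingFactor() returns a neutral factor for any domain absent from the overrides file, and only listed domains have their combined positive score scaled before normalization. A hypothetical illustration of the effect:

    // Assumed values for illustration only.
    double baseScore = score_firstPosition + score_proximity + score_verbatim + score_bM25 + score_bFlags;
    double boosted = 1.5 * baseScore;  // domain listed under a "value 1.5" override
    double neutral = 1.0 * baseScore;  // domain not present in the overrides file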
@@ -245,9 +249,13 @@ public class IndexResultScoreCalculator {
     private double calculateDocumentBonus(long documentMetadata,
                                           int features,
                                           int length,
-                                          ResultRankingParameters rankingParams,
+                                          RpcResultRankingParameters rankingParams,
                                           @Nullable DebugRankingFactors debugRankingFactors) {

+        if (rankingParams.getDisablePenalties()) {
+            return 0.;
+        }
+
         int rank = DocumentMetadata.decodeRank(documentMetadata);
         int asl = DocumentMetadata.decodeAvgSentenceLength(documentMetadata);
         int quality = DocumentMetadata.decodeQuality(documentMetadata);
@@ -256,18 +264,18 @@ public class IndexResultScoreCalculator {
         int topology = DocumentMetadata.decodeTopology(documentMetadata);
         int year = DocumentMetadata.decodeYear(documentMetadata);

-        double averageSentenceLengthPenalty = (asl >= rankingParams.shortSentenceThreshold ? 0 : -rankingParams.shortSentencePenalty);
+        double averageSentenceLengthPenalty = (asl >= rankingParams.getShortSentenceThreshold() ? 0 : -rankingParams.getShortSentencePenalty());

         final double qualityPenalty = calculateQualityPenalty(size, quality, rankingParams);
-        final double rankingBonus = (255. - rank) * rankingParams.domainRankBonus;
+        final double rankingBonus = (255. - rank) * rankingParams.getDomainRankBonus();
         final double topologyBonus = Math.log(1 + topology);
-        final double documentLengthPenalty = length > rankingParams.shortDocumentThreshold ? 0 : -rankingParams.shortDocumentPenalty;
+        final double documentLengthPenalty = length > rankingParams.getShortDocumentThreshold() ? 0 : -rankingParams.getShortDocumentPenalty();
         final double temporalBias;

-        if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.RECENT) {
-            temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.temporalBiasWeight;
-        } else if (rankingParams.temporalBias == ResultRankingParameters.TemporalBias.OLD) {
-            temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.temporalBiasWeight;
+        if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.RECENT) {
+            temporalBias = - Math.abs(year - PubDate.MAX_YEAR) * rankingParams.getTemporalBiasWeight();
+        } else if (rankingParams.getTemporalBias().getBias() == RpcTemporalBias.Bias.OLD) {
+            temporalBias = - Math.abs(year - PubDate.MIN_YEAR) * rankingParams.getTemporalBiasWeight();
         } else {
             temporalBias = 0;
         }
@@ -506,14 +514,14 @@ public class IndexResultScoreCalculator {
     }


-    private double calculateQualityPenalty(int size, int quality, ResultRankingParameters rankingParams) {
+    private double calculateQualityPenalty(int size, int quality, RpcResultRankingParameters rankingParams) {
         if (size < 400) {
             if (quality < 5)
                 return 0;
-            return -quality * rankingParams.qualityPenalty;
+            return -quality * rankingParams.getQualityPenalty();
         }
         else {
-            return -quality * rankingParams.qualityPenalty * 20;
+            return -quality * rankingParams.getQualityPenalty() * 20;
         }
     }

@@ -575,3 +583,4 @@ public class IndexResultScoreCalculator {
     }

 }
+
@@ -3,7 +3,6 @@ package nu.marginalia.index.results;
 import nu.marginalia.api.searchquery.model.compiled.CqDataInt;
 import nu.marginalia.api.searchquery.model.compiled.CqDataLong;
 import nu.marginalia.api.searchquery.model.compiled.CqExpression;
-import nu.marginalia.api.searchquery.model.results.Bm25Parameters;
 import nu.marginalia.api.searchquery.model.results.ResultRankingContext;
 import nu.marginalia.model.idx.WordFlags;

@@ -15,15 +14,14 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
     private final CqDataLong wordMetaData;
     private final CqDataInt frequencies;
     private final float[] counts;
-    private final Bm25Parameters bm25Parameters;
+    private final double k1;

     private final int docCount;

-    public TermFlagsGraphVisitor(Bm25Parameters bm25Parameters,
+    public TermFlagsGraphVisitor(double k1,
                                  CqDataLong wordMetaData,
                                  float[] counts,
                                  ResultRankingContext ctx) {
-        this.bm25Parameters = bm25Parameters;
+        this.k1 = k1;
         this.counts = counts;
         this.docCount = ctx.termFreqDocCount();
         this.wordMetaData = wordMetaData;
@@ -55,7 +53,7 @@ public class TermFlagsGraphVisitor implements CqExpression.DoubleVisitor {
         int freq = frequencies.get(idx);

         // note we override b to zero for priority terms as they are independent of document length
-        return invFreq(docCount, freq) * f(bm25Parameters.k(), 0, count, 0);
+        return invFreq(docCount, freq) * f(k1, 0, count, 0);
     }

     private double evaluatePriorityScore(int idx) {
@@ -1,7 +0,0 @@
-package nu.marginalia.index.query.limit;
-
-public record QueryLimits(int resultsByDomain, int resultsTotal, int timeoutMs, int fetchSize) {
-    public QueryLimits forSingleDomain() {
-        return new QueryLimits(resultsTotal, resultsTotal, timeoutMs, fetchSize);
-    }
-}
@@ -4,10 +4,11 @@ import com.google.inject.Guice;
 import com.google.inject.Inject;
 import nu.marginalia.IndexLocations;
 import nu.marginalia.api.searchquery.RpcDecoratedResultItem;
+import nu.marginalia.api.searchquery.RpcQueryLimits;
 import nu.marginalia.api.searchquery.model.query.SearchPhraseConstraint;
 import nu.marginalia.api.searchquery.model.query.SearchQuery;
 import nu.marginalia.api.searchquery.model.query.SearchSpecification;
-import nu.marginalia.api.searchquery.model.results.ResultRankingParameters;
+import nu.marginalia.api.searchquery.model.results.PrototypeRankingParameters;
 import nu.marginalia.index.construction.DocIdRewriter;
 import nu.marginalia.index.construction.full.FullIndexConstructor;
 import nu.marginalia.index.construction.prio.PrioIndexConstructor;
@@ -17,7 +18,6 @@ import nu.marginalia.index.forward.construction.ForwardIndexConverter;
 import nu.marginalia.index.index.StatefulIndex;
 import nu.marginalia.index.journal.IndexJournal;
 import nu.marginalia.index.journal.IndexJournalSlopWriter;
-import nu.marginalia.index.query.limit.QueryLimits;
 import nu.marginalia.index.query.limit.QueryStrategy;
 import nu.marginalia.index.query.limit.SpecificationLimit;
 import nu.marginalia.linkdb.docs.DocumentDbReader;
@@ -115,9 +115,16 @@ public class IndexQueryServiceIntegrationSmokeTest {

         var rsp = queryService.justQuery(
                 SearchSpecification.builder()
-                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
+                        .queryLimits(
+                                RpcQueryLimits.newBuilder()
+                                        .setResultsByDomain(10)
+                                        .setResultsTotal(10)
+                                        .setTimeoutMs(Integer.MAX_VALUE)
+                                        .setFetchSize(4000)
+                                        .build()
+                        )
                         .queryStrategy(QueryStrategy.SENTENCE)
-                        .rankingParams(ResultRankingParameters.sensibleDefaults())
+                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                         .domains(new ArrayList<>())
                         .searchSetIdentifier("NONE")
                         .query(
@@ -171,9 +178,16 @@ public class IndexQueryServiceIntegrationSmokeTest {

         var rsp = queryService.justQuery(
                 SearchSpecification.builder()
-                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
+                        .queryLimits(
+                                RpcQueryLimits.newBuilder()
+                                        .setResultsByDomain(10)
+                                        .setResultsTotal(10)
+                                        .setTimeoutMs(Integer.MAX_VALUE)
+                                        .setFetchSize(4000)
+                                        .build()
+                        )
                         .queryStrategy(QueryStrategy.SENTENCE)
-                        .rankingParams(ResultRankingParameters.sensibleDefaults())
+                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                         .domains(new ArrayList<>())
                         .searchSetIdentifier("NONE")
                         .query(
@@ -225,8 +239,15 @@ public class IndexQueryServiceIntegrationSmokeTest {

         var rsp = queryService.justQuery(
                 SearchSpecification.builder()
-                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
-                        .rankingParams(ResultRankingParameters.sensibleDefaults())
+                        .queryLimits(
+                                RpcQueryLimits.newBuilder()
+                                        .setResultsByDomain(10)
+                                        .setResultsTotal(10)
+                                        .setTimeoutMs(Integer.MAX_VALUE)
+                                        .setFetchSize(4000)
+                                        .build()
+                        )
+                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                         .queryStrategy(QueryStrategy.SENTENCE)
                         .domains(List.of(2))
                         .query(
@@ -282,11 +303,18 @@

         var rsp = queryService.justQuery(
                 SearchSpecification.builder()
-                        .queryLimits(new QueryLimits(10, 10, Integer.MAX_VALUE, 4000))
+                        .queryLimits(
+                                RpcQueryLimits.newBuilder()
+                                        .setResultsByDomain(10)
+                                        .setResultsTotal(10)
+                                        .setTimeoutMs(Integer.MAX_VALUE)
+                                        .setFetchSize(4000)
+                                        .build()
+                        )
                         .year(SpecificationLimit.equals(1998))
                         .queryStrategy(QueryStrategy.SENTENCE)
                         .searchSetIdentifier("NONE")
-                        .rankingParams(ResultRankingParameters.sensibleDefaults())
+                        .rankingParams(PrototypeRankingParameters.sensibleDefaults())
                         .query(
                                 SearchQuery.builder()
                                         .compiledQuery("4")
Some files were not shown because too many files have changed in this diff.