Mirror of https://github.com/MarginaliaSearch/MarginaliaSearch.git
Synced 2025-10-06 07:32:38 +02:00

Compare commits: deploy-008 ... deploy-008 (12 commits)
SHA1:
c2dd2175a2
b8581b0f56
2ea34767d8
e9af838231
ae0cad47c4
5fbc8ef998
32c6dd9e6a
6ece6a6cfb
39cd1c18f8
eb65daaa88
0bebdb6e33
72384ad6ca
@@ -24,58 +24,4 @@ public class LanguageModels {
         this.fasttextLanguageModel = fasttextLanguageModel;
         this.segments = segments;
     }
-
-    public static LanguageModelsBuilder builder() {
-        return new LanguageModelsBuilder();
-    }
-
-    public static class LanguageModelsBuilder {
-        private Path termFrequencies;
-        private Path openNLPSentenceDetectionData;
-        private Path posRules;
-        private Path posDict;
-        private Path fasttextLanguageModel;
-        private Path segments;
-
-        LanguageModelsBuilder() {
-        }
-
-        public LanguageModelsBuilder termFrequencies(Path termFrequencies) {
-            this.termFrequencies = termFrequencies;
-            return this;
-        }
-
-        public LanguageModelsBuilder openNLPSentenceDetectionData(Path openNLPSentenceDetectionData) {
-            this.openNLPSentenceDetectionData = openNLPSentenceDetectionData;
-            return this;
-        }
-
-        public LanguageModelsBuilder posRules(Path posRules) {
-            this.posRules = posRules;
-            return this;
-        }
-
-        public LanguageModelsBuilder posDict(Path posDict) {
-            this.posDict = posDict;
-            return this;
-        }
-
-        public LanguageModelsBuilder fasttextLanguageModel(Path fasttextLanguageModel) {
-            this.fasttextLanguageModel = fasttextLanguageModel;
-            return this;
-        }
-
-        public LanguageModelsBuilder segments(Path segments) {
-            this.segments = segments;
-            return this;
-        }
-
-        public LanguageModels build() {
-            return new LanguageModels(this.termFrequencies, this.openNLPSentenceDetectionData, this.posRules, this.posDict, this.fasttextLanguageModel, this.segments);
-        }
-
-        public String toString() {
-            return "LanguageModels.LanguageModelsBuilder(termFrequencies=" + this.termFrequencies + ", openNLPSentenceDetectionData=" + this.openNLPSentenceDetectionData + ", posRules=" + this.posRules + ", posDict=" + this.posDict + ", fasttextLanguageModel=" + this.fasttextLanguageModel + ", segments=" + this.segments + ")";
-        }
-    }
 }
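Note: with the hand-rolled builder gone, call sites presumably construct LanguageModels directly. A minimal sketch, using a record as a stand-in for the remaining six-argument constructor (file names are illustrative; two are borrowed from the model-download script further down):

import java.nio.file.Path;

// Record stand-in mirroring the six-argument constructor that remains.
record LanguageModels(Path termFrequencies,
                      Path openNLPSentenceDetectionData,
                      Path posRules,
                      Path posDict,
                      Path fasttextLanguageModel,
                      Path segments) {

    static LanguageModels fromModelDir(Path dir) {
        // Before: LanguageModels.builder().termFrequencies(...)....build()
        // After: one direct constructor call
        return new LanguageModels(
                dir.resolve("tfreq-new-algo3.bin"),
                dir.resolve("opennlp-sentence.bin"),
                dir.resolve("pos-rules.txt"),
                dir.resolve("pos-dict.bin"),
                dir.resolve("lid.176.ftz"),
                dir.resolve("segments.bin"));
    }
}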
@@ -19,6 +19,7 @@ import org.slf4j.LoggerFactory;
 
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
 import java.util.Map;
 import java.util.Optional;
 import java.util.function.Function;
@@ -60,35 +61,41 @@ public class MigrateCrawlDataActor extends RecordActorPrototype {
 
         for (Map.Entry<WorkLogEntry, Path> item : WorkLog.iterableMap(crawlerLog, new CrawlDataLocator(root))) {
 
-            var entry = item.getKey();
-            var path = item.getValue();
+            final WorkLogEntry entry = item.getKey();
+            final Path inputPath = item.getValue();
 
-            heartbeat.progress("Migrating" + path.toFile().getName(), entryIdx++, totalEntries);
+            Path outputPath = inputPath;
+            heartbeat.progress("Migrating" + inputPath.getFileName(), entryIdx++, totalEntries);
 
-            if (path.toFile().getName().endsWith(".parquet") && Files.exists(path)) {
-                try {
-                    String domain = entry.id();
-                    String id = Integer.toHexString(domain.hashCode());
+            if (inputPath.toString().endsWith(".parquet")) {
+                String domain = entry.id();
+                String id = Integer.toHexString(domain.hashCode());
 
-                    Path outputFile = CrawlerOutputFile.createSlopPath(root, id, domain);
+                outputPath = CrawlerOutputFile.createSlopPath(root, id, domain);
 
-                    SlopCrawlDataRecord.convertFromParquet(path, outputFile);
-
-                    workLog.setJobToFinished(entry.id(), outputFile.toString(), entry.cnt());
-                }
-                catch (Exception ex) {
-                    logger.error("Failed to convert " + path, ex);
+                if (Files.exists(inputPath)) {
+                    try {
+                        SlopCrawlDataRecord.convertFromParquet(inputPath, outputPath);
+                        Files.deleteIfExists(inputPath);
+                    } catch (Exception ex) {
+                        outputPath = inputPath; // don't update the work log on error
+                        logger.error("Failed to convert " + inputPath, ex);
+                    }
+                }
+                else if (!Files.exists(inputPath) && !Files.exists(outputPath)) {
+                    // if the input file is missing, and the output file is missing, we just write the log
+                    // record identical to the old one
+                    outputPath = inputPath;
                 }
             }
-            else {
-                workLog.setJobToFinished(entry.id(), path.toString(), entry.cnt());
-            }
+
+            // Write a log entry for the (possibly) converted file
+            workLog.setJobToFinished(entry.id(), outputPath.toString(), entry.cnt());
         }
 
         Path oldCrawlerLog = Files.createTempFile(root, "crawler-", ".migrate.old.log");
-        Files.move(crawlerLog, oldCrawlerLog);
+        Files.move(crawlerLog, oldCrawlerLog, StandardCopyOption.REPLACE_EXISTING);
         Files.move(newCrawlerLog, crawlerLog);
 
         yield new End();
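Note: the net effect of the rewrite is that outputPath starts out equal to inputPath and only moves past it when conversion actually succeeds, so exactly one work-log record is written per entry in every case. A standalone sketch of that invariant (migrateOne is hypothetical, and Files.copy stands in for convertFromParquet):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

class MigrateSketch {
    // Returns the path to record in the work log: the converted output on
    // success, the original input otherwise.
    static Path migrateOne(Path inputPath, Path slopPath) {
        if (!inputPath.toString().endsWith(".parquet") || !Files.exists(inputPath))
            return inputPath; // nothing to convert; log the old record unchanged

        try {
            // stand-in for SlopCrawlDataRecord.convertFromParquet(...)
            Files.copy(inputPath, slopPath, StandardCopyOption.REPLACE_EXISTING);
            Files.deleteIfExists(inputPath);
            return slopPath;
        }
        catch (Exception ex) {
            return inputPath; // don't update the work log on error
        }
    }
}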
@@ -134,6 +134,10 @@ public class QueryExpansion {
             if (scoreCombo > scoreA + scoreB || scoreCombo > 1000) {
                 graph.addVariantForSpan(prev, qw, joinedWord);
             }
+            else if (StringUtils.isAlpha(prev.word()) && StringUtils.isNumeric(qw.word())) { // join e.g. trs 80 to trs80 and trs-80
+                graph.addVariantForSpan(prev, qw, prev.word() + qw.word());
+                graph.addVariantForSpan(prev, qw, prev.word() + "-" + qw.word());
+            }
         }
 
         prev = qw;
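Note: adjacent alphabetic/numeric tokens now produce fused variants regardless of the bigram score. A small sketch of the rule (commons-lang3 StringUtils, as in the diff):

import org.apache.commons.lang3.StringUtils;
import java.util.List;

class NumJoinSketch {
    // For a pair like ("trs", "80"), produce "trs80" and "trs-80".
    static List<String> fusedVariants(String a, String b) {
        if (StringUtils.isAlpha(a) && StringUtils.isNumeric(b)) {
            return List.of(a + b, a + "-" + b);
        }
        return List.of();
    }

    public static void main(String[] args) {
        System.out.println(fusedVariants("trs", "80"));   // [trs80, trs-80]
        System.out.println(fusedVariants("glove", "80")); // [glove80, glove-80]
    }
}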
@@ -213,6 +213,18 @@ public class QueryFactoryTest {
         System.out.println(subquery);
     }
 
+
+    @Test
+    public void testContractionWordNum() {
+        var subquery = parseAndGetSpecs("glove 80");
+
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" 80 "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove-80 "));
+        Assertions.assertTrue(subquery.query.compiledQuery.contains(" glove80 "));
+    }
+
+
     @Test
     public void testCplusPlus() {
         var subquery = parseAndGetSpecs("std::vector::push_back vector");
@@ -5,9 +5,7 @@ import nu.marginalia.actor.state.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
 
 public abstract class RecordActorPrototype implements ActorPrototype {
 
@@ -118,7 +116,7 @@ public abstract class RecordActorPrototype implements ActorPrototype {
     }
 
     private String functionName(Class<? extends ActorStep> functionClass) {
-        return functionClass.getSimpleName().toUpperCase();
+        return ActorStep.functionName(functionClass);
     }
 
     private ActorStep constructState(String message) throws ReflectiveOperationException {
@@ -145,4 +143,43 @@ public abstract class RecordActorPrototype implements ActorPrototype {
         }
     }
 
+    /** Get a list of JSON prototypes for each actor step declared by this actor */
+    @SuppressWarnings("unchecked")
+    public Map<String, String> getMessagePrototypes() {
+        Map<String, String> messagePrototypes = new HashMap<>();
+
+        for (var clazz : getClass().getDeclaredClasses()) {
+            if (!clazz.isRecord() || !ActorStep.class.isAssignableFrom(clazz))
+                continue;
+
+            StringJoiner sj = new StringJoiner(",\n\t", "{\n\t", "\n}");
+
+            renderToJsonPrototype(sj, (Class<? extends Record>) clazz);
+
+            messagePrototypes.put(ActorStep.functionName((Class<? extends ActorStep>) clazz), sj.toString());
+        }
+
+        return messagePrototypes;
+    }
+
+    @SuppressWarnings("unchecked")
+    private void renderToJsonPrototype(StringJoiner sj, Class<? extends Record> recordType) {
+        for (var field : recordType.getDeclaredFields()) {
+            String typeName = field.getType().getSimpleName();
+
+            if ("List".equals(typeName)) {
+                sj.add(String.format("\"%s\": [ ]", field.getName()));
+            }
+            else if (field.getType().isRecord()) {
+                var innerSj = new StringJoiner(",", "{", "}");
+                renderToJsonPrototype(innerSj, (Class<? extends Record>) field.getType());
+                sj.add(String.format("\"%s\": %s", field.getName(), innerSj));
+            }
+            else {
+                sj.add(String.format("\"%s\": \"%s\"", field.getName(), typeName));
+            }
+        }
+
+    }
+
 }
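Note: for a flat step record, getMessagePrototypes() would presumably render one "name": "Type" pair per component. A toy illustration (the Convert record is hypothetical):

import java.util.StringJoiner;

class PrototypeDemo {
    record Convert(String path, int count) {}

    public static void main(String[] args) {
        // Mirrors renderToJsonPrototype for a flat record: one "name": "Type"
        // pair per component, joined into a JSON-ish skeleton.
        StringJoiner sj = new StringJoiner(",\n\t", "{\n\t", "\n}");
        for (var field : Convert.class.getDeclaredFields()) {
            sj.add(String.format("\"%s\": \"%s\"", field.getName(), field.getType().getSimpleName()));
        }
        System.out.println(sj);
        // {
        //     "path": "String",
        //     "count": "int"
        // }
    }
}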
@@ -1,3 +1,7 @@
 package nu.marginalia.actor.state;
 
-public interface ActorStep {}
+public interface ActorStep {
+    static String functionName(Class<? extends ActorStep> type) {
+        return type.getSimpleName().toUpperCase();
+    }
+}
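Note: a quick usage sketch of the new static helper (the Initial record is hypothetical):

interface ActorStep {
    static String functionName(Class<? extends ActorStep> type) {
        return type.getSimpleName().toUpperCase();
    }
}

record Initial() implements ActorStep {}

class FunctionNameDemo {
    public static void main(String[] args) {
        // Prints INITIAL; both the actor runtime and getMessagePrototypes()
        // can now derive the same name without duplicating the logic.
        System.out.println(ActorStep.functionName(Initial.class));
    }
}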
@@ -35,6 +35,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Optional;
@@ -202,13 +203,19 @@ public class ConverterMain extends ProcessMainClass {
             heartbeat.setProgress(processedDomains.get() / (double) totalDomains);
 
             logger.info("Processing small items");
-            int numBigTasks = 0;
+
+            // We separate the large and small domains to reduce the number of critical sections,
+            // as the large domains have a separate processing track that doesn't store everything
+            // in memory
+
+            final List<Path> bigTasks = new ArrayList<>();
 
+            // First process the small items
             for (var dataPath : WorkLog.iterableMap(crawlDir.getLogFile(),
                     new CrawlDataLocator(crawlDir.getDir(), batchingWorkLog)))
             {
                 if (SerializableCrawlDataStream.getSizeHint(dataPath) >= SIDELOAD_THRESHOLD) {
-                    numBigTasks ++;
+                    bigTasks.add(dataPath);
                     continue;
                 }
@@ -239,15 +246,8 @@ public class ConverterMain extends ProcessMainClass {
             try (var hb = heartbeat.createAdHocTaskHeartbeat("Large Domains")) {
                 int bigTaskIdx = 0;
                 // Next the big items domain-by-domain
-                for (var dataPath : WorkLog.iterableMap(crawlDir.getLogFile(),
-                        new CrawlDataLocator(crawlDir.getDir(), batchingWorkLog)))
-                {
-                    int sizeHint = SerializableCrawlDataStream.getSizeHint(dataPath);
-                    if (sizeHint < SIDELOAD_THRESHOLD) {
-                        continue;
-                    }
-
-                    hb.progress(dataPath.toFile().getName(), bigTaskIdx++, numBigTasks);
+                for (var dataPath : bigTasks) {
+                    hb.progress(dataPath.toFile().getName(), bigTaskIdx++, bigTasks.size());
 
                     try {
                         // SerializableCrawlDataStream is autocloseable, we can't try-with-resources because then it will be
@@ -255,7 +255,7 @@ public class ConverterMain extends ProcessMainClass {
                     // will close it after it's consumed.
 
                     var stream = SerializableCrawlDataStream.openDataStream(dataPath);
-                    ConverterBatchWritableIf writable = processor.simpleProcessing(stream, sizeHint);
+                    ConverterBatchWritableIf writable = processor.simpleProcessing(stream, SerializableCrawlDataStream.getSizeHint(dataPath));
 
                     converterWriter.accept(writable);
                 }
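Note: the restructuring trades a second pass over the work log for a small in-memory list: pass one handles small domains inline and defers anything at or above SIDELOAD_THRESHOLD, pass two drains the deferred list. A generic sketch of the pattern (names and types are illustrative):

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.ToLongFunction;

class TwoPassSketch {
    static void process(Iterable<Path> workLog,
                        ToLongFunction<Path> sizeHint,
                        long sideloadThreshold,
                        Consumer<Path> small,
                        Consumer<Path> big) {
        List<Path> bigTasks = new ArrayList<>();

        // Pass 1: small items inline, large items deferred
        for (Path p : workLog) {
            if (sizeHint.applyAsLong(p) >= sideloadThreshold) bigTasks.add(p);
            else small.accept(p);
        }

        // Pass 2: the large items, domain by domain
        for (Path p : bigTasks) big.accept(p);
    }
}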
@@ -116,7 +116,7 @@ public class AdblockSimulator {
 
 
     // Refrain from cleaning up this code, it's very hot code and needs to be fast.
-    // This version is about 100x faster than the a "clean" first stab implementation.
+    // This version is about 100x faster than a "clean" first stab implementation.
 
     class RuleVisitor implements NodeFilter {
         public boolean sawAds;
@@ -23,7 +23,7 @@ public class DocumentGeneratorExtractor {
 
         var tags = doc.select("meta[name=generator]");
 
-        if (tags.size() == 0) {
+        if (tags.isEmpty()) {
             // Some sites have a comment in the head instead of a meta tag
             return fingerprintServerTech(doc, responseHeaders);
         }
@@ -127,7 +127,7 @@ public class EncyclopediaMarginaliaNuSideloader implements SideloadSource, AutoC
         }
         fullHtml.append("</div></body></html>");
 
-        var doc = sideloaderProcessing
+        return sideloaderProcessing
                 .processDocument(fullUrl,
                         fullHtml.toString(),
                         List.of("encyclopedia", "wiki"),
@@ -137,8 +137,6 @@ public class EncyclopediaMarginaliaNuSideloader implements SideloadSource, AutoC
                         anchorTextKeywords.getAnchorTextKeywords(domainLinks, new EdgeUrl(fullUrl)),
                         LocalDate.now().getYear(),
                         10_000_000);
-
-        return doc;
     }
 
     private String normalizeUtf8(String url) {
@@ -381,8 +381,10 @@ public class CrawlerRetreiver implements AutoCloseable {
                 if (docOpt.isPresent()) {
                     var doc = docOpt.get();
 
-                    crawlFrontier.enqueueLinksFromDocument(top, doc);
-                    crawlFrontier.addVisited(new EdgeUrl(ok.uri()));
+                    var responseUrl = new EdgeUrl(ok.uri());
+
+                    crawlFrontier.enqueueLinksFromDocument(responseUrl, doc);
+                    crawlFrontier.addVisited(responseUrl);
                 }
             }
             else if (fetchedDoc instanceof HttpFetchResult.Result304Raw && reference.doc() != null) {
@@ -12,8 +12,7 @@ import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.http.HttpHeaders;
-import java.util.Arrays;
-import java.util.Optional;
+import java.util.*;
 
 /* FIXME: This interface has a very unfortunate name that is not very descriptive.
 */
@@ -65,7 +64,21 @@ public sealed interface HttpFetchResult {
     ) implements HttpFetchResult {
 
         public ResultOk(URI uri, int status, MessageHeaders headers, String ipAddress, byte[] bytes, int bytesStart, int length) {
-            this(uri, status, HttpHeaders.of(headers.map(), (k,v) -> true), ipAddress, bytes, bytesStart, length);
+            this(uri, status, convertHeaders(headers), ipAddress, bytes, bytesStart, length);
+        }
+
+        private static HttpHeaders convertHeaders(MessageHeaders messageHeaders) {
+            Map<String, List<String>> inputMap = messageHeaders.map();
+            Map<String, List<String>> filteredMap = new HashMap<>(Math.max(4, inputMap.size()));
+
+            inputMap.forEach((k, v) -> {
+                if (k.isBlank()) return;
+                if (!Character.isAlphabetic(k.charAt(0))) return;
+
+                filteredMap.put(k, v);
+            });
+
+            return HttpHeaders.of(filteredMap, (k,v) -> true);
         }
 
         public boolean isOk() {
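Note: the filter presumably guards against header names that downstream consumers reject, e.g. HTTP/2 pseudo-headers such as ":status", which begin with ':'. A small self-contained sketch of the same filtering (the sample map is made up):

import java.net.http.HttpHeaders;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class HeaderFilterSketch {
    public static void main(String[] args) {
        Map<String, List<String>> raw = Map.of(
                ":status", List.of("200"),             // pseudo-header, dropped
                "Content-Type", List.of("text/html")); // kept

        Map<String, List<String>> filtered = new HashMap<>();
        raw.forEach((k, v) -> {
            if (k.isBlank()) return;
            if (!Character.isAlphabetic(k.charAt(0))) return;
            filtered.put(k, v);
        });

        HttpHeaders headers = HttpHeaders.of(filtered, (k, v) -> true);
        System.out.println(headers.map()); // {Content-Type=[text/html]}
    }
}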
@@ -9,7 +9,7 @@
         <span>
             Access logs containing IP-addresses are retained for up to 24 hours,
             anonymized logs with source addresses removed are sometimes kept longer
-            to help diagnosing bugs.
+            to help diagnose bugs.
         </span>
     </div>
     <div class="flex space-y-4 flex-col">
@@ -16,8 +16,6 @@ platforms, but for lack of suitable hardware, this can not be guaranteed.
 The civilized way of installing this is to use [SDKMAN](https://sdkman.io/);
 graalce is a good distribution choice but it doesn't matter too much.
 
-**Tailwindcss** - Install NPM and run `npm install tailwindcss @tailwindcss/cli`
-
 ## Quick Set up
 
 [https://docs.marginalia.nu/](https://docs.marginalia.nu/) has a more comprehensive guide for the install
@@ -74,3 +74,7 @@ download_model model/tfreq-new-algo3.bin https://huggingface.co/MarginaliaNu/Mar
 download_model model/lid.176.ftz https://huggingface.co/MarginaliaNu/MarginaliaModelData/resolve/c9339e4224f1dfad7f628809c32687e748198ae3/lid.176.ftz?download=true 340156704bb8c8e50c4abf35a7ec2569
 
 popd
+
+pushd $(dirname $0)/..
+npm install -D tailwindcss@3
+popd