mirror of
https://github.com/ArchiveBox/ArchiveBox.git
synced 2025-05-14 15:14:31 -04:00
new vastly simplified plugin spec without pydantic
Some checks are pending
Build Debian package / build (push) Waiting to run
Build Docker image / buildx (push) Waiting to run
Build Homebrew package / build (push) Waiting to run
Run linters / lint (push) Waiting to run
Build Pip package / build (push) Waiting to run
Run tests / python_tests (ubuntu-22.04, 3.11) (push) Waiting to run
Run tests / docker_tests (push) Waiting to run
Some checks are pending
Build Debian package / build (push) Waiting to run
Build Docker image / buildx (push) Waiting to run
Build Homebrew package / build (push) Waiting to run
Run linters / lint (push) Waiting to run
Build Pip package / build (push) Waiting to run
Run tests / python_tests (ubuntu-22.04, 3.11) (push) Waiting to run
Run tests / docker_tests (push) Waiting to run
This commit is contained in:
parent
abf75f49f4
commit
01ba6d49d3
115 changed files with 2466 additions and 2301 deletions
|
__package__ = 'plugins_search.ripgrep'
__label__ = 'ripgrep'
__version__ = '2024.10.14'
__author__ = 'Nick Sweeting'
__homepage__ = 'https://github.com/BurntSushi/ripgrep'
__dependencies__ = []

import abx


@abx.hookimpl
def get_PLUGIN():
    """Register this package's metadata with the abx plugin registry."""
    metadata = dict(
        PACKAGE=__package__,
        LABEL=__label__,
        VERSION=__version__,
        AUTHOR=__author__,
        HOMEPAGE=__homepage__,
        DEPENDENCIES=__dependencies__,
    )
    return {'ripgrep': metadata}


@abx.hookimpl
def get_CONFIG():
    """Expose the ripgrep config set (imported lazily to avoid import cycles)."""
    from .config import RIPGREP_CONFIG
    return {'ripgrep': RIPGREP_CONFIG}


@abx.hookimpl
def get_BINARIES():
    """Expose the rg binary spec (imported lazily to avoid import cycles)."""
    from .binaries import RIPGREP_BINARY
    return {'ripgrep': RIPGREP_BINARY}


@abx.hookimpl
def get_SEARCHBACKENDS():
    """Expose the ripgrep full-text search backend implementation."""
    from .searchbackend import RIPGREP_SEARCH_BACKEND
    return {'ripgrep': RIPGREP_SEARCH_BACKEND}
|
|
@ -1,114 +0,0 @@
|
|||
__package__ = 'archivebox.plugins_search.ripgrep'
|
||||
|
||||
import re
|
||||
from pathlib import Path
|
||||
from subprocess import run
|
||||
from typing import List, Iterable
|
||||
# from typing_extensions import Self
|
||||
|
||||
# Depends on other PyPI/vendor packages:
|
||||
from pydantic import InstanceOf, Field
|
||||
from pydantic_pkgr import BinProvider, BinaryOverrides, BinName
|
||||
|
||||
# Depends on other Django apps:
|
||||
from abx.archivebox.base_plugin import BasePlugin
|
||||
from abx.archivebox.base_configset import BaseConfigSet
|
||||
from abx.archivebox.base_binary import BaseBinary, env, apt, brew
|
||||
from abx.archivebox.base_hook import BaseHook
|
||||
from abx.archivebox.base_searchbackend import BaseSearchBackend
|
||||
|
||||
# Depends on Other Plugins:
|
||||
from archivebox.config import CONSTANTS
|
||||
from archivebox.config.common import SEARCH_BACKEND_CONFIG
|
||||
|
||||
###################### Config ##########################
|
||||
|
||||
class RipgrepConfig(BaseConfigSet):
|
||||
RIPGREP_BINARY: str = Field(default='rg')
|
||||
|
||||
RIPGREP_IGNORE_EXTENSIONS: str = Field(default='css,js,orig,svg')
|
||||
RIPGREP_ARGS_DEFAULT: List[str] = Field(default=lambda c: [
|
||||
# https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md
|
||||
f'--type-add=ignore:*.{{{c.RIPGREP_IGNORE_EXTENSIONS}}}',
|
||||
'--type-not=ignore',
|
||||
'--ignore-case',
|
||||
'--files-with-matches',
|
||||
'--regexp',
|
||||
])
|
||||
RIPGREP_SEARCH_DIR: Path = CONSTANTS.ARCHIVE_DIR
|
||||
|
||||
RIPGREP_CONFIG = RipgrepConfig()
|
||||
|
||||
|
||||
|
||||
class RipgrepBinary(BaseBinary):
|
||||
name: BinName = RIPGREP_CONFIG.RIPGREP_BINARY
|
||||
binproviders_supported: List[InstanceOf[BinProvider]] = [apt, brew, env]
|
||||
|
||||
overrides: BinaryOverrides = {
|
||||
apt.name: {'packages': ['ripgrep']},
|
||||
brew.name: {'packages': ['ripgrep']},
|
||||
}
|
||||
|
||||
RIPGREP_BINARY = RipgrepBinary()
|
||||
|
||||
# regex to match archive/<ts>/... snapshot dir names
|
||||
TIMESTAMP_REGEX = re.compile(r'\/([\d]+\.[\d]+)\/')
|
||||
|
||||
class RipgrepSearchBackend(BaseSearchBackend):
|
||||
name: str = 'ripgrep'
|
||||
docs_url: str = 'https://github.com/BurntSushi/ripgrep'
|
||||
|
||||
@staticmethod
|
||||
def index(snapshot_id: str, texts: List[str]):
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def flush(snapshot_ids: Iterable[str]):
|
||||
return
|
||||
|
||||
@staticmethod
|
||||
def search(text: str) -> List[str]:
|
||||
from core.models import Snapshot
|
||||
|
||||
ripgrep_binary = RIPGREP_BINARY.load()
|
||||
if not ripgrep_binary.version:
|
||||
raise Exception("ripgrep binary not found, install ripgrep to use this search backend")
|
||||
|
||||
cmd = [
|
||||
ripgrep_binary.abspath,
|
||||
*RIPGREP_CONFIG.RIPGREP_ARGS_DEFAULT,
|
||||
text,
|
||||
str(RIPGREP_CONFIG.RIPGREP_SEARCH_DIR),
|
||||
]
|
||||
proc = run(cmd, timeout=SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_TIMEOUT, capture_output=True, text=True)
|
||||
timestamps = set()
|
||||
for path in proc.stdout.splitlines():
|
||||
ts = TIMESTAMP_REGEX.findall(path)
|
||||
if ts:
|
||||
timestamps.add(ts[0])
|
||||
|
||||
snap_ids = [str(id) for id in Snapshot.objects.filter(timestamp__in=timestamps).values_list('pk', flat=True)]
|
||||
|
||||
return snap_ids
|
||||
|
||||
RIPGREP_SEARCH_BACKEND = RipgrepSearchBackend()
|
||||
|
||||
|
||||
|
||||
|
||||
class RipgrepSearchPlugin(BasePlugin):
|
||||
app_label: str ='ripgrep'
|
||||
verbose_name: str = 'Ripgrep'
|
||||
|
||||
hooks: List[InstanceOf[BaseHook]] = [
|
||||
RIPGREP_CONFIG,
|
||||
RIPGREP_BINARY,
|
||||
RIPGREP_SEARCH_BACKEND,
|
||||
]
|
||||
|
||||
|
||||
|
||||
PLUGIN = RipgrepSearchPlugin()
|
||||
# PLUGIN.register(settings)
|
||||
DJANGO_APP = PLUGIN.AppConfig
|
23
archivebox/plugins_search/ripgrep/binaries.py
Normal file
23
archivebox/plugins_search/ripgrep/binaries.py
Normal file
|
__package__ = 'plugins_search.ripgrep'

from typing import List

from pydantic import InstanceOf
from pydantic_pkgr import BinProvider, BinaryOverrides, BinName

from abx.archivebox.base_binary import BaseBinary, env, apt, brew


from .config import RIPGREP_CONFIG


class RipgrepBinary(BaseBinary):
    """Binary spec for ripgrep: how to locate `rg` and which providers can install it."""

    name: BinName = RIPGREP_CONFIG.RIPGREP_BINARY
    binproviders_supported: List[InstanceOf[BinProvider]] = [apt, brew, env]

    # both apt and homebrew publish rg under the package name "ripgrep"
    overrides: BinaryOverrides = {
        provider.name: {'packages': ['ripgrep']}
        for provider in (apt, brew)
    }


RIPGREP_BINARY = RipgrepBinary()
29
archivebox/plugins_search/ripgrep/config.py
Normal file
29
archivebox/plugins_search/ripgrep/config.py
Normal file
|
__package__ = 'plugins_search.ripgrep'

from pathlib import Path
from typing import List

from pydantic import Field

from abx.archivebox.base_configset import BaseConfigSet

from archivebox.config import CONSTANTS
from archivebox.config.common import SEARCH_BACKEND_CONFIG


def _default_ripgrep_args(c) -> List[str]:
    """Build the default rg CLI flags from the live config values.

    https://github.com/BurntSushi/ripgrep/blob/master/GUIDE.md
    """
    return [
        f'--type-add=ignore:*.{{{c.RIPGREP_IGNORE_EXTENSIONS}}}',
        '--type-not=ignore',
        '--ignore-case',
        '--files-with-matches',
        '--regexp',
    ]


class RipgrepConfig(BaseConfigSet):
    """User-tunable settings for the ripgrep search backend."""

    RIPGREP_BINARY: str = Field(default='rg')

    RIPGREP_IGNORE_EXTENSIONS: str = Field(default='css,js,orig,svg')
    RIPGREP_ARGS_DEFAULT: List[str] = Field(default=_default_ripgrep_args)
    RIPGREP_SEARCH_DIR: Path = CONSTANTS.ARCHIVE_DIR
    RIPGREP_TIMEOUT: int = Field(default=lambda: SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_TIMEOUT)


RIPGREP_CONFIG = RipgrepConfig()
|
55
archivebox/plugins_search/ripgrep/searchbackend.py
Normal file
55
archivebox/plugins_search/ripgrep/searchbackend.py
Normal file
|
__package__ = 'plugins_search.ripgrep'

import re
import subprocess

from typing import List, Iterable

from abx.archivebox.base_searchbackend import BaseSearchBackend

from .binaries import RIPGREP_BINARY
from .config import RIPGREP_CONFIG


# regex to match archive/<ts>/... snapshot dir names
TIMESTAMP_REGEX = re.compile(r'\/([\d]+\.[\d]+)\/')


class RipgrepSearchBackend(BaseSearchBackend):
    """Index-free search backend that greps the archive dir on-disk with ripgrep."""

    name: str = 'ripgrep'
    docs_url: str = 'https://github.com/BurntSushi/ripgrep'

    @staticmethod
    def index(snapshot_id: str, texts: List[str]):
        # no-op: ripgrep scans the filesystem directly, there is nothing to index
        return

    @staticmethod
    def flush(snapshot_ids: Iterable[str]):
        # no-op: there is no index to remove entries from
        return

    @staticmethod
    def search(text: str) -> List[str]:
        from core.models import Snapshot

        ripgrep = RIPGREP_BINARY.load()
        if not ripgrep.version:
            raise Exception("ripgrep binary not found, install ripgrep to use this search backend")

        cmd = [
            ripgrep.abspath,
            *RIPGREP_CONFIG.RIPGREP_ARGS_DEFAULT,
            text,
            str(RIPGREP_CONFIG.RIPGREP_SEARCH_DIR),
        ]
        proc = subprocess.run(cmd, timeout=RIPGREP_CONFIG.RIPGREP_TIMEOUT, capture_output=True, text=True)

        # each matching file lives under archive/<timestamp>/..., collect the timestamps
        timestamps = set()
        for line in proc.stdout.splitlines():
            found = TIMESTAMP_REGEX.findall(line)
            if found:
                timestamps.add(found[0])

        # map the matched timestamps back to Snapshot primary keys
        return [str(pk) for pk in Snapshot.objects.filter(timestamp__in=timestamps).values_list('pk', flat=True)]
|
|
__package__ = 'plugins_search.sonic'
__label__ = 'sonic'
__version__ = '2024.10.14'
__author__ = 'Nick Sweeting'
__homepage__ = 'https://github.com/valeriansaliou/sonic'
__dependencies__ = []

import abx


@abx.hookimpl
def get_PLUGIN():
    """Register this package's metadata with the abx plugin registry."""
    metadata = dict(
        PACKAGE=__package__,
        LABEL=__label__,
        VERSION=__version__,
        AUTHOR=__author__,
        HOMEPAGE=__homepage__,
        DEPENDENCIES=__dependencies__,
    )
    return {'sonic': metadata}


@abx.hookimpl
def get_CONFIG():
    """Expose the sonic config set (imported lazily to avoid import cycles)."""
    from .config import SONIC_CONFIG
    return {'sonic': SONIC_CONFIG}


@abx.hookimpl
def get_BINARIES():
    """Expose the sonic server binary spec (imported lazily to avoid import cycles)."""
    from .binaries import SONIC_BINARY
    return {'sonic': SONIC_BINARY}


@abx.hookimpl
def get_SEARCHBACKENDS():
    """Expose the sonic full-text search backend implementation."""
    from .searchbackend import SONIC_SEARCH_BACKEND
    return {'sonic': SONIC_SEARCH_BACKEND}
|
@ -1,131 +0,0 @@
|
|||
__package__ = 'archivebox.plugins_search.sonic'
|
||||
|
||||
import sys
|
||||
from typing import List, Generator, cast
|
||||
|
||||
# Depends on other PyPI/vendor packages:
|
||||
from pydantic import InstanceOf, Field, model_validator
|
||||
from pydantic_pkgr import BinProvider, BinaryOverrides, BinName
|
||||
|
||||
# Depends on other Django apps:
|
||||
from abx.archivebox.base_plugin import BasePlugin
|
||||
from abx.archivebox.base_configset import BaseConfigSet
|
||||
from abx.archivebox.base_binary import BaseBinary, env, brew
|
||||
from abx.archivebox.base_hook import BaseHook
|
||||
from abx.archivebox.base_searchbackend import BaseSearchBackend
|
||||
|
||||
# Depends on Other Plugins:
|
||||
from archivebox.config.common import SEARCH_BACKEND_CONFIG
|
||||
|
||||
SONIC_LIB = None
|
||||
try:
|
||||
import sonic
|
||||
SONIC_LIB = sonic
|
||||
except ImportError:
|
||||
SONIC_LIB = None
|
||||
|
||||
###################### Config ##########################
|
||||
|
||||
class SonicConfig(BaseConfigSet):
|
||||
SONIC_BINARY: str = Field(default='sonic')
|
||||
|
||||
SONIC_HOST: str = Field(default='localhost', alias='SEARCH_BACKEND_HOST_NAME')
|
||||
SONIC_PORT: int = Field(default=1491, alias='SEARCH_BACKEND_PORT')
|
||||
SONIC_PASSWORD: str = Field(default='SecretPassword', alias='SEARCH_BACKEND_PASSWORD')
|
||||
SONIC_COLLECTION: str = Field(default='archivebox')
|
||||
SONIC_BUCKET: str = Field(default='archivebox')
|
||||
|
||||
SONIC_MAX_CHUNK_LENGTH: int = Field(default=2000)
|
||||
SONIC_MAX_TEXT_LENGTH: int = Field(default=100000000)
|
||||
SONIC_MAX_RETRIES: int = Field(default=5)
|
||||
|
||||
@model_validator(mode='after')
|
||||
def validate_sonic_port(self):
|
||||
if SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE == 'sonic' and SONIC_LIB is None:
|
||||
sys.stderr.write('[X] Error: Sonic search backend is enabled but sonic-client lib is not installed. You may need to run: pip install archivebox[sonic]\n')
|
||||
# dont hard exit here. in case the user is just running "archivebox version" or "archivebox help", we still want those to work despite broken ldap
|
||||
# sys.exit(1)
|
||||
SEARCH_BACKEND_CONFIG.update_in_place(SEARCH_BACKEND_ENGINE='ripgrep')
|
||||
return self
|
||||
|
||||
SONIC_CONFIG = SonicConfig()
|
||||
|
||||
|
||||
class SonicBinary(BaseBinary):
|
||||
name: BinName = SONIC_CONFIG.SONIC_BINARY
|
||||
binproviders_supported: List[InstanceOf[BinProvider]] = [brew, env] # TODO: add cargo
|
||||
|
||||
overrides: BinaryOverrides = {
|
||||
brew.name: {'packages': ['sonic']},
|
||||
# cargo.name: {'packages': ['sonic-server']}, # TODO: add cargo
|
||||
}
|
||||
|
||||
# TODO: add version checking over protocol? for when sonic backend is on remote server and binary is not installed locally
|
||||
# def on_get_version(self):
|
||||
# with sonic.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
|
||||
# return SemVer.parse(str(ingestcl.protocol))
|
||||
|
||||
SONIC_BINARY = SonicBinary()
|
||||
|
||||
|
||||
|
||||
class SonicSearchBackend(BaseSearchBackend):
|
||||
name: str = 'sonic'
|
||||
docs_url: str = 'https://github.com/valeriansaliou/sonic'
|
||||
|
||||
@staticmethod
|
||||
def index(snapshot_id: str, texts: List[str]):
|
||||
error_count = 0
|
||||
with sonic.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
|
||||
for text in texts:
|
||||
chunks = (
|
||||
text[i:i+SONIC_CONFIG.SONIC_MAX_CHUNK_LENGTH]
|
||||
for i in range(
|
||||
0,
|
||||
min(len(text), SONIC_CONFIG.SONIC_MAX_TEXT_LENGTH),
|
||||
SONIC_CONFIG.SONIC_MAX_CHUNK_LENGTH,
|
||||
)
|
||||
)
|
||||
try:
|
||||
for chunk in chunks:
|
||||
ingestcl.push(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, snapshot_id, str(chunk))
|
||||
except Exception as err:
|
||||
print(f'[!] Sonic search backend threw an error while indexing: {err.__class__.__name__} {err}')
|
||||
error_count += 1
|
||||
if error_count > SONIC_CONFIG.SONIC_MAX_RETRIES:
|
||||
raise
|
||||
|
||||
@staticmethod
|
||||
def flush(snapshot_ids: Generator[str, None, None]):
|
||||
with sonic.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
|
||||
for id in snapshot_ids:
|
||||
ingestcl.flush_object(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, str(id))
|
||||
|
||||
|
||||
@staticmethod
|
||||
def search(text: str) -> List[str]:
|
||||
with sonic.SearchClient(SONIC_CONFIG.SONIC_HOST, SONIC_CONFIG.SONIC_PORT, SONIC_CONFIG.SONIC_PASSWORD) as querycl:
|
||||
snap_ids = cast(List[str], querycl.query(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, text))
|
||||
return [str(id) for id in snap_ids]
|
||||
|
||||
|
||||
SONIC_SEARCH_BACKEND = SonicSearchBackend()
|
||||
|
||||
|
||||
|
||||
|
||||
class SonicSearchPlugin(BasePlugin):
|
||||
app_label: str ='sonic'
|
||||
verbose_name: str = 'Sonic'
|
||||
|
||||
hooks: List[InstanceOf[BaseHook]] = [
|
||||
SONIC_CONFIG,
|
||||
*([SONIC_BINARY] if (SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE == 'sonic') else []),
|
||||
SONIC_SEARCH_BACKEND,
|
||||
]
|
||||
|
||||
|
||||
|
||||
PLUGIN = SonicSearchPlugin()
|
||||
# PLUGIN.register(settings)
|
||||
DJANGO_APP = PLUGIN.AppConfig
|
27
archivebox/plugins_search/sonic/binaries.py
Normal file
27
archivebox/plugins_search/sonic/binaries.py
Normal file
|
__package__ = 'plugins_search.sonic'

from typing import List

from pydantic import InstanceOf
from pydantic_pkgr import BinProvider, BinaryOverrides, BinName

from abx.archivebox.base_binary import BaseBinary, env, brew

from .config import SONIC_CONFIG


class SonicBinary(BaseBinary):
    """Binary spec for the sonic search server daemon."""

    name: BinName = SONIC_CONFIG.SONIC_BINARY
    binproviders_supported: List[InstanceOf[BinProvider]] = [brew, env]  # TODO: add cargo

    # only homebrew distributes a prebuilt sonic package at the moment
    overrides: BinaryOverrides = {
        brew.name: {'packages': ['sonic']},
        # cargo.name: {'packages': ['sonic-server']},  # TODO: add cargo
    }

    # TODO: add version checking over protocol? for when sonic backend is on remote server and binary is not installed locally
    # def on_get_version(self):
    #     with sonic.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
    #         return SemVer.parse(str(ingestcl.protocol))


SONIC_BINARY = SonicBinary()
|
44
archivebox/plugins_search/sonic/config.py
Normal file
44
archivebox/plugins_search/sonic/config.py
Normal file
|
__package__ = 'plugins_search.sonic'

import sys

from pydantic import Field, model_validator

from abx.archivebox.base_configset import BaseConfigSet

from archivebox.config.common import SEARCH_BACKEND_CONFIG


# optional dependency: the sonic-client python lib is only required when the
# sonic search backend is actually enabled
try:
    import sonic as SONIC_LIB
except ImportError:
    SONIC_LIB = None


###################### Config ##########################


class SonicConfig(BaseConfigSet):
    """User-tunable settings for the sonic search backend."""

    SONIC_BINARY: str = Field(default='sonic')

    SONIC_HOST: str = Field(default='localhost', alias='SEARCH_BACKEND_HOST_NAME')
    SONIC_PORT: int = Field(default=1491, alias='SEARCH_BACKEND_PORT')
    SONIC_PASSWORD: str = Field(default='SecretPassword', alias='SEARCH_BACKEND_PASSWORD')
    SONIC_COLLECTION: str = Field(default='archivebox')
    SONIC_BUCKET: str = Field(default='archivebox')

    SONIC_MAX_CHUNK_LENGTH: int = Field(default=2000)
    SONIC_MAX_TEXT_LENGTH: int = Field(default=100000000)
    SONIC_MAX_RETRIES: int = Field(default=5)

    @model_validator(mode='after')
    def validate_sonic_port(self):
        # if sonic is selected but its client lib is missing, warn and fall back
        # to ripgrep instead of hard-exiting: the user may just be running
        # "archivebox version" or "archivebox help" and those should still work
        if SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE == 'sonic' and SONIC_LIB is None:
            sys.stderr.write('[X] Error: Sonic search backend is enabled but sonic-client lib is not installed. You may need to run: pip install archivebox[sonic]\n')
            SEARCH_BACKEND_CONFIG.update_in_place(SEARCH_BACKEND_ENGINE='ripgrep')
        return self


SONIC_CONFIG = SonicConfig()
|
51
archivebox/plugins_search/sonic/searchbackend.py
Normal file
51
archivebox/plugins_search/sonic/searchbackend.py
Normal file
|
__package__ = 'plugins_search.sonic'

from typing import List, Generator, cast

from abx.archivebox.base_searchbackend import BaseSearchBackend


from .config import SONIC_CONFIG, SONIC_LIB


class SonicSearchBackend(BaseSearchBackend):
    """Search backend backed by a running sonic server."""

    name: str = 'sonic'
    docs_url: str = 'https://github.com/valeriansaliou/sonic'

    @staticmethod
    def index(snapshot_id: str, texts: List[str]):
        """Push each text into sonic in chunks, tolerating up to SONIC_MAX_RETRIES errors."""
        error_count = 0
        max_chunk = SONIC_CONFIG.SONIC_MAX_CHUNK_LENGTH
        with SONIC_LIB.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
            for text in texts:
                # sonic rejects oversized pushes, so cap the total length and split into chunks
                capped_len = min(len(text), SONIC_CONFIG.SONIC_MAX_TEXT_LENGTH)
                chunks = (text[i:i + max_chunk] for i in range(0, capped_len, max_chunk))
                try:
                    for chunk in chunks:
                        ingestcl.push(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, snapshot_id, str(chunk))
                except Exception as err:
                    print(f'[!] Sonic search backend threw an error while indexing: {err.__class__.__name__} {err}')
                    error_count += 1
                    if error_count > SONIC_CONFIG.SONIC_MAX_RETRIES:
                        raise

    @staticmethod
    def flush(snapshot_ids: Generator[str, None, None]):
        """Remove the given snapshot ids from the sonic index."""
        with SONIC_LIB.IngestClient(SONIC_CONFIG.SONIC_HOST, str(SONIC_CONFIG.SONIC_PORT), SONIC_CONFIG.SONIC_PASSWORD) as ingestcl:
            for snapshot_id in snapshot_ids:
                ingestcl.flush_object(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, str(snapshot_id))

    @staticmethod
    def search(text: str) -> List[str]:
        """Query sonic and return the matching snapshot ids as strings."""
        with SONIC_LIB.SearchClient(SONIC_CONFIG.SONIC_HOST, SONIC_CONFIG.SONIC_PORT, SONIC_CONFIG.SONIC_PASSWORD) as querycl:
            snap_ids = cast(List[str], querycl.query(SONIC_CONFIG.SONIC_COLLECTION, SONIC_CONFIG.SONIC_BUCKET, text))
        return [str(snap_id) for snap_id in snap_ids]


SONIC_SEARCH_BACKEND = SonicSearchBackend()
|
39
archivebox/plugins_search/sqlitefts/__init__.py
Normal file
39
archivebox/plugins_search/sqlitefts/__init__.py
Normal file
|
__package__ = 'plugins_search.sqlitefts'
__label__ = 'sqlitefts'
__version__ = '2024.10.14'
__author__ = 'Nick Sweeting'
__homepage__ = 'https://github.com/ArchiveBox/archivebox'
__dependencies__ = []

import abx


@abx.hookimpl
def get_PLUGIN():
    """Register this package's metadata with the abx plugin registry."""
    metadata = dict(
        PACKAGE=__package__,
        LABEL=__label__,
        VERSION=__version__,
        AUTHOR=__author__,
        HOMEPAGE=__homepage__,
        DEPENDENCIES=__dependencies__,
    )
    return {'sqlitefts': metadata}


@abx.hookimpl
def get_CONFIG():
    """Expose the sqlitefts config set (imported lazily to avoid import cycles)."""
    from .config import SQLITEFTS_CONFIG
    return {'sqlitefts': SQLITEFTS_CONFIG}


@abx.hookimpl
def get_SEARCHBACKENDS():
    """Expose the sqlite FTS5 search backend implementation."""
    from .searchbackend import SQLITEFTS_SEARCH_BACKEND
    return {'sqlitefts': SQLITEFTS_SEARCH_BACKEND}
|
73
archivebox/plugins_search/sqlitefts/config.py
Normal file
73
archivebox/plugins_search/sqlitefts/config.py
Normal file
|
__package__ = 'plugins_search.sqlitefts'

import sys
import sqlite3
from typing import Callable

from django.core.exceptions import ImproperlyConfigured

from pydantic import Field, model_validator

from abx.archivebox.base_configset import BaseConfigSet

from archivebox.config.common import SEARCH_BACKEND_CONFIG



###################### Config ##########################

class SqliteftsConfig(BaseConfigSet):
    """Settings for the sqlite FTS5 search backend.

    The FTS index lives either in its own sqlite database file
    (SQLITEFTS_SEPARATE_DATABASE=True, the default) or inside Django's
    main database connection.
    """

    SQLITEFTS_SEPARATE_DATABASE: bool = Field(default=True, alias='FTS_SEPARATE_DATABASE')
    SQLITEFTS_TOKENIZERS: str = Field(default='porter unicode61 remove_diacritics 2', alias='FTS_TOKENIZERS')
    SQLITEFTS_MAX_LENGTH: int = Field(default=int(1e9), alias='FTS_SQLITE_MAX_LENGTH')

    # Not really meant to be user-modified, just here as constants
    SQLITEFTS_DB: str = Field(default='search.sqlite3')
    SQLITEFTS_TABLE: str = Field(default='snapshot_fts')
    SQLITEFTS_ID_TABLE: str = Field(default='snapshot_id_fts')
    SQLITEFTS_COLUMN: str = Field(default='texts')

    @model_validator(mode='after')
    def validate_fts_separate_database(self):
        # fall back to ripgrep rather than crashing when misconfigured
        if SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE == 'sqlite' and self.SQLITEFTS_SEPARATE_DATABASE and not self.SQLITEFTS_DB:
            sys.stderr.write('[X] Error: SQLITEFTS_DB must be set if SQLITEFTS_SEPARATE_DATABASE is True\n')
            SEARCH_BACKEND_CONFIG.update_in_place(SEARCH_BACKEND_ENGINE='ripgrep')
        return self

    @property
    def get_connection(self) -> Callable[[], sqlite3.Connection]:
        # Make get_connection callable, because `django.db.connection.cursor()`
        # has to be called to get a context manager, but sqlite3.Connection
        # is a context manager without being called.
        if self.SQLITEFTS_SEPARATE_DATABASE:
            return lambda: sqlite3.connect(self.SQLITEFTS_DB)
        else:
            from django.db import connection as database
            return database.cursor

    @property
    def SQLITE_BIND(self) -> str:
        # paramstyle differs between the stdlib sqlite3 driver and Django's cursor
        if self.SQLITEFTS_SEPARATE_DATABASE:
            return "?"
        else:
            return "%s"

    @property
    def SQLITE_LIMIT_LENGTH(self) -> int:
        from django.db import connection as database

        # Only Python >= 3.11 supports sqlite3.Connection.getlimit(),
        # so fall back to the default if the API to get the real value isn't present
        try:
            limit_id = sqlite3.SQLITE_LIMIT_LENGTH  # type: ignore[attr-defined]

            if self.SQLITEFTS_SEPARATE_DATABASE:
                # BUGFIX: get_connection() returns a sqlite3.Connection directly
                # (not a cursor), so call getlimit() on it. The previous code read
                # `.connection` off it, which raised AttributeError and silently
                # fell back to SQLITEFTS_MAX_LENGTH every time (and leaked the
                # connection). Close the connection after reading the limit.
                conn = self.get_connection()
                try:
                    return conn.getlimit(limit_id)  # type: ignore[attr-defined]
                finally:
                    conn.close()
            else:
                with database.temporary_connection() as cursor:  # type: ignore[attr-defined]
                    return cursor.connection.getlimit(limit_id)
        except (AttributeError, ImproperlyConfigured):
            return self.SQLITEFTS_MAX_LENGTH


SQLITEFTS_CONFIG = SqliteftsConfig()
|
|
@ -1,83 +1,12 @@
|
|||
__package__ = 'archivebox.plugins_search.sqlite'
|
||||
__package__ = 'plugins_search.sqlitefts'
|
||||
|
||||
import sys
|
||||
import codecs
|
||||
import sqlite3
|
||||
from typing import List, Iterable, Callable
|
||||
from typing import List, Iterable
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
# Depends on other PyPI/vendor packages:
|
||||
from pydantic import InstanceOf, Field, model_validator
|
||||
|
||||
# Depends on other Django apps:
|
||||
from abx.archivebox.base_plugin import BasePlugin
|
||||
from abx.archivebox.base_configset import BaseConfigSet
|
||||
from abx.archivebox.base_hook import BaseHook
|
||||
from abx.archivebox.base_searchbackend import BaseSearchBackend
|
||||
|
||||
# Depends on Other Plugins:
|
||||
from archivebox.config.common import SEARCH_BACKEND_CONFIG
|
||||
|
||||
|
||||
|
||||
###################### Config ##########################
|
||||
|
||||
class SqliteftsConfig(BaseConfigSet):
|
||||
SQLITEFTS_SEPARATE_DATABASE: bool = Field(default=True, alias='FTS_SEPARATE_DATABASE')
|
||||
SQLITEFTS_TOKENIZERS: str = Field(default='porter unicode61 remove_diacritics 2', alias='FTS_TOKENIZERS')
|
||||
SQLITEFTS_MAX_LENGTH: int = Field(default=int(1e9), alias='FTS_SQLITE_MAX_LENGTH')
|
||||
|
||||
# Not really meant to be user-modified, just here as constants
|
||||
SQLITEFTS_DB: str = Field(default='search.sqlite3')
|
||||
SQLITEFTS_TABLE: str = Field(default='snapshot_fts')
|
||||
SQLITEFTS_ID_TABLE: str = Field(default='snapshot_id_fts')
|
||||
SQLITEFTS_COLUMN: str = Field(default='texts')
|
||||
|
||||
@model_validator(mode='after')
|
||||
def validate_fts_separate_database(self):
|
||||
if SEARCH_BACKEND_CONFIG.SEARCH_BACKEND_ENGINE == 'sqlite' and self.SQLITEFTS_SEPARATE_DATABASE and not self.SQLITEFTS_DB:
|
||||
sys.stderr.write('[X] Error: SQLITEFTS_DB must be set if SQLITEFTS_SEPARATE_DATABASE is True\n')
|
||||
SEARCH_BACKEND_CONFIG.update_in_place(SEARCH_BACKEND_ENGINE='ripgrep')
|
||||
return self
|
||||
|
||||
@property
|
||||
def get_connection(self) -> Callable[[], sqlite3.Connection]:
|
||||
# Make get_connection callable, because `django.db.connection.cursor()`
|
||||
# has to be called to get a context manager, but sqlite3.Connection
|
||||
# is a context manager without being called.
|
||||
if self.SQLITEFTS_SEPARATE_DATABASE:
|
||||
return lambda: sqlite3.connect(self.SQLITEFTS_DB)
|
||||
else:
|
||||
from django.db import connection as database
|
||||
return database.cursor
|
||||
|
||||
@property
|
||||
def SQLITE_BIND(self) -> str:
|
||||
if self.SQLITEFTS_SEPARATE_DATABASE:
|
||||
return "?"
|
||||
else:
|
||||
return "%s"
|
||||
|
||||
@property
|
||||
def SQLITE_LIMIT_LENGTH(self) -> int:
|
||||
from django.db import connection as database
|
||||
|
||||
# Only Python >= 3.11 supports sqlite3.Connection.getlimit(),
|
||||
# so fall back to the default if the API to get the real value isn't present
|
||||
try:
|
||||
limit_id = sqlite3.SQLITE_LIMIT_LENGTH # type: ignore[attr-defined]
|
||||
|
||||
if self.SQLITEFTS_SEPARATE_DATABASE:
|
||||
cursor = self.get_connection()
|
||||
return cursor.connection.getlimit(limit_id) # type: ignore[attr-defined]
|
||||
else:
|
||||
with database.temporary_connection() as cursor: # type: ignore[attr-defined]
|
||||
return cursor.connection.getlimit(limit_id)
|
||||
except (AttributeError, ImproperlyConfigured):
|
||||
return self.SQLITEFTS_MAX_LENGTH
|
||||
|
||||
SQLITEFTS_CONFIG = SqliteftsConfig()
|
||||
from .config import SQLITEFTS_CONFIG
|
||||
|
||||
|
||||
|
||||
|
@ -242,20 +171,3 @@ class SqliteftsSearchBackend(BaseSearchBackend):
|
|||
_handle_query_exception(e)
|
||||
|
||||
SQLITEFTS_SEARCH_BACKEND = SqliteftsSearchBackend()
|
||||
|
||||
|
||||
|
||||
class SqliteftsSearchPlugin(BasePlugin):
|
||||
app_label: str ='sqlitefts'
|
||||
verbose_name: str = 'SQLite FTS5 Search'
|
||||
|
||||
hooks: List[InstanceOf[BaseHook]] = [
|
||||
SQLITEFTS_CONFIG,
|
||||
SQLITEFTS_SEARCH_BACKEND,
|
||||
]
|
||||
|
||||
|
||||
|
||||
PLUGIN = SqliteftsSearchPlugin()
|
||||
# PLUGIN.register(settings)
|
||||
DJANGO_APP = PLUGIN.AppConfig
|
Loading…
Add table
Add a link
Reference in a new issue