Mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-24 11:47:04 -04:00)
split plugin dirs, created new cleaner import path for plugin config in settings.py
This commit is contained in:
parent: 1a58967e8c
commit: a9a97c013d

39 changed files with 469 additions and 199 deletions
@@ -1,9 +1 @@
 __package__ = 'archivebox.plugantic'
-
-from .base_plugin import BasePlugin
-from .base_configset import BaseConfigSet
-from .base_binary import BaseBinary
-from .base_extractor import BaseExtractor
-from .base_replayer import BaseReplayer
-from .base_check import BaseCheck
-
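The hunk above removes the convenience re-exports from what is presumably plugantic/__init__.py. Based on the commit message about a cleaner import path, downstream code would now import each base class from its defining submodule instead of the package root; a hypothetical sketch of the assumed usage, not code from this commit:

# before: re-exported at the package root
# from archivebox.plugantic import BasePlugin, BaseConfigSet

# after (assumed): import directly from the defining submodule,
# so touching the package no longer pulls in every base class
from archivebox.plugantic.base_plugin import BasePlugin
from archivebox.plugantic.base_configset import BaseConfigSet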
@@ -1,59 +1,59 @@
-import os
+# import os

-from pathlib import Path
+# from pathlib import Path

-from benedict import benedict
-from rich.pretty import pprint
+# from benedict import benedict
+# from rich.pretty import pprint

-from ansible_runner import Runner, RunnerConfig
+# from ansible_runner import Runner, RunnerConfig

-GLOBAL_CACHE = {}
+# GLOBAL_CACHE = {}


-def run_playbook(playbook_path, data_dir, quiet=False, **kwargs):
-    ANSIBLE_TMP_DIR = str(Path(data_dir) / "tmp" / "ansible")
-    os.environ['ANSIBLE_INVENTORY_UNPARSED_WARNING'] = 'False'
-    os.environ['ANSIBLE_LOCALHOST_WARNING'] = 'False'
-    os.environ["ANSIBLE_HOME"] = ANSIBLE_TMP_DIR
-    # os.environ["ANSIBLE_COLLECTIONS_PATH"] = str(Path(data_dir).parent / 'archivebox')
-    os.environ["ANSIBLE_ROLES_PATH"] = (
-        '/Volumes/NVME/Users/squash/Code/archiveboxes/archivebox7/archivebox/builtin_plugins/ansible/roles'
-    )
+# def run_playbook(playbook_path, data_dir, quiet=False, **kwargs):
+#     ANSIBLE_TMP_DIR = str(Path(data_dir) / "tmp" / "ansible")
+#     os.environ['ANSIBLE_INVENTORY_UNPARSED_WARNING'] = 'False'
+#     os.environ['ANSIBLE_LOCALHOST_WARNING'] = 'False'
+#     os.environ["ANSIBLE_HOME"] = ANSIBLE_TMP_DIR
+#     # os.environ["ANSIBLE_COLLECTIONS_PATH"] = str(Path(data_dir).parent / 'archivebox')
+#     os.environ["ANSIBLE_ROLES_PATH"] = (
+#         './roles'
+#     )

-    rc = RunnerConfig(
-        private_data_dir=ANSIBLE_TMP_DIR,
-        playbook=str(playbook_path),
-        rotate_artifacts=50000,
-        host_pattern="localhost",
-        extravars={
-            "DATA_DIR": str(data_dir),
-            **kwargs,
-        },
-        quiet=quiet,
-    )
-    rc.prepare()
-    r = Runner(config=rc)
-    r.set_fact_cache('localhost', GLOBAL_CACHE)
-    r.run()
-    last_run_facts = r.get_fact_cache('localhost')
-    GLOBAL_CACHE.update(filtered_facts(last_run_facts))
-    return benedict({
-        key: val
-        for key, val in last_run_facts.items()
-        if not (key.startswith('ansible_') or key in ('gather_subset', 'module_setup'))
-    })
+#     rc = RunnerConfig(
+#         private_data_dir=ANSIBLE_TMP_DIR,
+#         playbook=str(playbook_path),
+#         rotate_artifacts=50000,
+#         host_pattern="localhost",
+#         extravars={
+#             "DATA_DIR": str(data_dir),
+#             **kwargs,
+#         },
+#         quiet=quiet,
+#     )
+#     rc.prepare()
+#     r = Runner(config=rc)
+#     r.set_fact_cache('localhost', GLOBAL_CACHE)
+#     r.run()
+#     last_run_facts = r.get_fact_cache('localhost')
+#     GLOBAL_CACHE.update(filtered_facts(last_run_facts))
+#     return benedict({
+#         key: val
+#         for key, val in last_run_facts.items()
+#         if not (key.startswith('ansible_') or key in ('gather_subset', 'module_setup'))
+#     })


-def filtered_facts(facts):
-    return benedict({
-        key: val
-        for key, val in facts.items()
-        if not (key.startswith('ansible_') or key in ('gather_subset', 'module_setup'))
-    })
+# def filtered_facts(facts):
+#     return benedict({
+#         key: val
+#         for key, val in facts.items()
+#         if not (key.startswith('ansible_') or key in ('gather_subset', 'module_setup'))
+#     })

-def print_globals():
-    pprint(filtered_facts(GLOBAL_CACHE), expand_all=True)
+# def print_globals():
+#     pprint(filtered_facts(GLOBAL_CACHE), expand_all=True)


-# YTDLP_OUTPUT = run_playbook('extract.yml', {'url': 'https://www.youtube.com/watch?v=cK4REjqGc9w&t=27s'})
-# pprint(YTDLP_OUTPUT)
+# # YTDLP_OUTPUT = run_playbook('extract.yml', {'url': 'https://www.youtube.com/watch?v=cK4REjqGc9w&t=27s'})
+# # pprint(YTDLP_OUTPUT)
@@ -1,21 +1,22 @@
 __package__ = 'archivebox.plugantic'

-from typing import Dict
+# from typing import Dict

 from .base_hook import BaseHook, HookType
 from ..config_stubs import AttrDict


 class BaseAdminDataView(BaseHook):
     hook_type: HookType = "ADMINDATAVIEW"

-    verbose_name: str = 'NPM Installed Packages'
-    route: str = '/npm/installed/'
-    view: str = 'builtin_plugins.npm.admin.installed_list_view'
-    items: Dict[str, str] = {
-        "name": "installed_npm_pkg",
-        'route': '<str:key>/',
-        'view': 'builtin_plugins.npm.admin.installed_detail_view',
-    }
+    # verbose_name: str = 'Data View'
+    # route: str = '/npm/installed/'
+    # view: str = 'pkg_plugins.npm.admin.installed_list_view'
+    # items: Dict[str, str] = {
+    #     "name": "installed_npm_pkg",
+    #     'route': '<str:key>/',
+    #     'view': 'pkg_plugins.npm.admin.installed_detail_view',
+    # }

     def register(self, settings, parent_plugin=None):
+        # self._plugin = parent_plugin  # circular ref to parent only here for easier debugging! never depend on circular backref to parent in real code!
@@ -42,7 +42,11 @@ class BaseBinProvider(BaseHook, BinProvider):
         settings.BINPROVIDERS[self.id] = self

         super().register(settings, parent_plugin=parent_plugin)

+    @property
+    def admin_url(self) -> str:
+        # e.g. /admin/environment/binproviders/NpmBinProvider/ TODO
+        return "/admin/environment/binaries/"
+

 class BaseBinary(BaseHook, Binary):
@@ -87,6 +91,11 @@ class BaseBinary(BaseHook, Binary):
         binary = super().load_or_install(**kwargs)
         self.symlink_to_lib(binary=binary, bin_dir=settings.CONFIG.BIN_DIR)
         return binary

+    @property
+    def admin_url(self) -> str:
+        # e.g. /admin/environment/config/LdapConfig/
+        return f"/admin/environment/binaries/{self.name}/"
+

 apt = AptProvider()
 brew = BrewProvider()
@@ -1,9 +1,10 @@
 __package__ = 'archivebox.plugantic'

+import os
 import re
 import json
 from pathlib import Path
-from typing import List, Literal, Type, Tuple, Callable, ClassVar, Any
+from typing import Literal, Type, Tuple, Callable, ClassVar, Any, get_args

 import toml
 from benedict import benedict
@@ -13,29 +14,27 @@ from pydantic_settings.sources import TomlConfigSettingsSource

 from pydantic_pkgr.base_types import func_takes_args_or_kwargs

 from django.conf import settings

 from .base_hook import BaseHook, HookType
 from . import ini_to_toml


+PACKAGE_DIR = Path(__file__).resolve().parent.parent
+DATA_DIR = Path(os.curdir).resolve()
+
+
 ConfigSectionName = Literal[
     'SHELL_CONFIG',
     'GENERAL_CONFIG',
     'STORAGE_CONFIG',
     'SERVER_CONFIG',
     'ARCHIVING_CONFIG',
     'LDAP_CONFIG',
     'ARCHIVE_METHOD_TOGGLES',
     'ARCHIVE_METHOD_OPTIONS',
     'SEARCH_BACKEND_CONFIG',
     'DEPENDENCY_CONFIG',
 ]
-ConfigSectionNames: List[ConfigSectionName] = [
-    'SHELL_CONFIG',
-    'GENERAL_CONFIG',
-    'SERVER_CONFIG',
-    'ARCHIVE_METHOD_TOGGLES',
-    'ARCHIVE_METHOD_OPTIONS',
-    'SEARCH_BACKEND_CONFIG',
-    'DEPENDENCY_CONFIG',
-]
+ConfigSectionNames: Tuple[ConfigSectionName, ...] = get_args(ConfigSectionName)  # just gets the list of values from the Literal type


 def better_toml_dump_str(val: Any) -> str:
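For context on the ConfigSectionNames change above: typing.get_args() returns the values of a Literal type as a tuple, so the allowed section names only need to be written once. A minimal standalone sketch (illustrative, not ArchiveBox code):

from typing import Literal, Tuple, get_args

ConfigSectionName = Literal['SHELL_CONFIG', 'GENERAL_CONFIG']

# get_args() extracts the Literal's values, so the runtime tuple can never
# drift out of sync with the type annotation
ConfigSectionNames: Tuple[ConfigSectionName, ...] = get_args(ConfigSectionName)

assert ConfigSectionNames == ('SHELL_CONFIG', 'GENERAL_CONFIG')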
@@ -136,7 +135,7 @@ class ArchiveBoxBaseConfig(BaseSettings):
     ) -> Tuple[PydanticBaseSettingsSource, ...]:
         """Defines the config precedence order: Schema defaults -> ArchiveBox.conf (TOML) -> Environment variables"""

-        ARCHIVEBOX_CONFIG_FILE = settings.DATA_DIR / "ArchiveBox.conf"
+        ARCHIVEBOX_CONFIG_FILE = DATA_DIR / "ArchiveBox.conf"
         ARCHIVEBOX_CONFIG_FILE_BAK = ARCHIVEBOX_CONFIG_FILE.parent / ".ArchiveBox.conf.bak"

         # import ipdb; ipdb.set_trace()
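The docstring in the hunk above describes the precedence order (schema defaults -> ArchiveBox.conf TOML -> environment variables). A rough sketch of how that ordering is usually expressed with pydantic-settings, where earlier sources in the returned tuple take priority; the class and field names here are illustrative, not the actual ArchiveBox implementation:

from pathlib import Path
from typing import Tuple

from pydantic_settings import BaseSettings, PydanticBaseSettingsSource
from pydantic_settings.sources import TomlConfigSettingsSource


class ExampleConfig(BaseSettings):
    OUTPUT_PERMISSIONS: str = '644'   # schema default (lowest precedence)

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls,
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> Tuple[PydanticBaseSettingsSource, ...]:
        # earlier sources win: env vars override the TOML file, and anything
        # set in neither falls back to the field defaults declared above
        return (
            env_settings,
            TomlConfigSettingsSource(settings_cls, toml_file=Path('ArchiveBox.conf')),
        )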
@@ -177,7 +176,7 @@ class ArchiveBoxBaseConfig(BaseSettings):
         """Populate any unset values using function provided as their default"""

         for key, field in self.model_fields.items():
-            config_so_far = self.model_dump(include=set(self.model_fields.keys()), warnings=False)
+            config_so_far = benedict(self.model_dump(include=set(self.model_fields.keys()), warnings=False))
             value = getattr(self, key)
             if isinstance(value, Callable):
                 # if value is a function, execute it to get the actual value, passing existing config as a dict arg
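The change above wraps the partial config dump in benedict() before it is handed to callable defaults. As a generic illustration of the callable-default pattern itself (hypothetical keys, not the ArchiveBox class):

from typing import Any, Callable, Dict

defaults: Dict[str, Any] = {
    'TIMEOUT': 60,
    # a callable default is resolved lazily, against the config gathered so far
    'MEDIA_TIMEOUT': lambda config: config['TIMEOUT'] * 4,
}

def resolve(config: Dict[str, Any]) -> Dict[str, Any]:
    resolved: Dict[str, Any] = {}
    for key, value in config.items():
        # execute callables with the partially-resolved config as their argument
        resolved[key] = value(resolved) if isinstance(value, Callable) else value
    return resolved

assert resolve(defaults)['MEDIA_TIMEOUT'] == 240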
@@ -5,7 +5,7 @@ from huey.api import TaskWrapper

 from pathlib import Path
 from typing import List, Literal, ClassVar
-from pydantic import BaseModel, ConfigDict, Field, computed_field
+from pydantic import BaseModel, ConfigDict


 HookType = Literal['CONFIG', 'BINPROVIDER', 'BINARY', 'EXTRACTOR', 'REPLAYER', 'CHECK', 'ADMINDATAVIEW', 'QUEUE']
@@ -26,11 +26,11 @@ class BaseHook(BaseModel):
     # django imports AppConfig, models, migrations, admins, etc. for all installed apps
     # django then calls AppConfig.ready() on each installed app...

-    builtin_plugins.npm.NpmPlugin().AppConfig.ready()  # called by django
-        builtin_plugins.npm.NpmPlugin().register(settings) ->
-            builtin_plugins.npm.NpmConfigSet().register(settings)
+    pkg_plugins.npm.NpmPlugin().AppConfig.ready()  # called by django
+        pkg_plugins.npm.NpmPlugin().register(settings) ->
+            pkg_plugins.npm.NpmConfigSet().register(settings)
                 plugantic.base_configset.BaseConfigSet().register(settings)
-                    plugantic.base_hook.BaseHook().register(settings, parent_plugin=builtin_plugins.npm.NpmPlugin())
+                    plugantic.base_hook.BaseHook().register(settings, parent_plugin=pkg_plugins.npm.NpmPlugin())
                         ...
     ...
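To make the call chain in that docstring concrete, here is a toy sketch of the cascading registration pattern it describes (simplified stand-ins, not the real BaseHook/BasePlugin classes):

from types import SimpleNamespace

class ToyHook:
    id = 'ExampleConfigSet'
    hook_type = 'CONFIG'

    def register(self, settings, parent_plugin=None):
        # each hook mounts itself into the settings registry for its hook type
        getattr(settings, self.hook_type + 'S')[self.id] = self

class ToyPlugin:
    name = 'example'
    hooks = [ToyHook()]

    def register(self, settings):
        # called from AppConfig.ready(); cascades registration down to every hook
        settings.PLUGINS[self.name] = self
        for hook in self.hooks:
            hook.register(settings, parent_plugin=self)

settings = SimpleNamespace(PLUGINS={}, CONFIGS={})
ToyPlugin().register(settings)
assert 'ExampleConfigSet' in settings.CONFIGS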
@@ -74,22 +74,27 @@

     @property
     def hook_module(self) -> str:
-        """e.g. builtin_plugins.singlefile.apps.SinglefileConfigSet"""
+        """e.g. extractor_plugins.singlefile.apps.SinglefileConfigSet"""
         return f'{self.__module__}.{self.__class__.__name__}'

     @property
     def hook_file(self) -> Path:
-        """e.g. builtin_plugins.singlefile.apps.SinglefileConfigSet"""
+        """e.g. extractor_plugins.singlefile.apps.SinglefileConfigSet"""
         return Path(inspect.getfile(self.__class__))

     @property
     def plugin_module(self) -> str:
-        """e.g. builtin_plugins.singlefile"""
+        """e.g. extractor_plugins.singlefile"""
         return f"{self.__module__}.{self.__class__.__name__}".split("archivebox.", 1)[-1].rsplit(".apps.", 1)[0]

     @property
     def plugin_dir(self) -> Path:
         return Path(inspect.getfile(self.__class__)).parent.resolve()

+    @property
+    def admin_url(self) -> str:
+        # e.g. /admin/environment/config/LdapConfig/
+        return f"/admin/environment/{self.hook_type.lower()}/{self.id}/"
+

     def register(self, settings, parent_plugin=None):
@@ -39,6 +39,7 @@ class BasePlugin(BaseModel):
     # Required by AppConfig:
     app_label: str = Field()  # e.g. 'singlefile' (one-word machine-readable representation, to use as url-safe id/db-table prefix_/attr name)
     verbose_name: str = Field()  # e.g. 'SingleFile' (human-readable *short* label, for use in column names, form labels, etc.)
+    docs_url: str = Field(default=None)  # e.g. 'https://github.com/...'

     # All the hooks the plugin will install:
     hooks: List[InstanceOf[BaseHook]] = Field(default=[])
@@ -60,10 +61,16 @@
     def plugin_module(self) -> str:  # DottedImportPath
         """ "
        Dotted import path of the plugin's module (after its loaded via settings.INSTALLED_APPS).
-        e.g. 'archivebox.builtin_plugins.npm.apps.NpmPlugin' -> 'builtin_plugins.npm'
+        e.g. 'archivebox.pkg_plugins.npm.apps.NpmPlugin' -> 'pkg_plugins.npm'
         """
         return f"{self.__module__}.{self.__class__.__name__}".split("archivebox.", 1)[-1].rsplit('.apps.', 1)[0]

+    @property
+    def plugin_module_full(self) -> str:  # DottedImportPath
+        """e.g. 'archivebox.pkg_plugins.npm.apps.NpmPlugin'"""
+        return f"{self.__module__}.{self.__class__.__name__}"
+
     # @computed_field
     @property
     def plugin_dir(self) -> Path:
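The plugin_module / plugin_module_full properties above are plain string manipulation on the class's dotted path; the derivation can be checked in isolation (no Django needed):

dotted = 'archivebox.pkg_plugins.npm.apps.NpmPlugin'   # __module__ + class name
plugin_module = dotted.split('archivebox.', 1)[-1].rsplit('.apps.', 1)[0]
assert plugin_module == 'pkg_plugins.npm'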
@@ -77,7 +84,7 @@ class BasePlugin(BaseModel):
         # preserve references to original default objects,
         # pydantic deepcopies them by default which breaks mutability
         # see https://github.com/pydantic/pydantic/issues/7608
-        # if we dont do this, then builtin_plugins.base.CORE_CONFIG != settings.CONFIGS.CoreConfig for example
+        # if we dont do this, then sys_plugins.base.CORE_CONFIG != settings.CONFIGS.CoreConfig for example
         # and calling .__init__() on one of them will not update the other
         self.hooks = self.model_fields['hooks'].default
@@ -81,7 +81,7 @@ def binaries_list_view(request: HttpRequest, **kwargs) -> TableContext:
     }

     for plugin in settings.PLUGINS.values():
-        for binary in plugin.HOOKS_BY_TYPE.BINARY.values():
+        for binary in plugin.HOOKS_BY_TYPE.get('BINARY', {}).values():
             try:
                 binary = binary.load()
             except Exception as e:
@@ -125,7 +125,7 @@ def binary_detail_view(request: HttpRequest, key: str, **kwargs) -> ItemContext:
     binary = None
     plugin = None
     for loaded_plugin in settings.PLUGINS.values():
-        for loaded_binary in loaded_plugin.HOOKS_BY_TYPE.BINARY.values():
+        for loaded_binary in loaded_plugin.HOOKS_BY_TYPE.get('BINARY', {}).values():
             if loaded_binary.name == key:
                 binary = loaded_binary
                 plugin = loaded_plugin
@@ -175,17 +175,17 @@ def plugins_list_view(request: HttpRequest, **kwargs) -> TableContext:


     for plugin in settings.PLUGINS.values():
-        try:
-            plugin = plugin.load_binaries()
-        except Exception as e:
-            print(e)
+        # try:
+        #     plugin.load_binaries()
+        # except Exception as e:
+        #     print(e)

         rows['Name'].append(ItemLink(plugin.id, key=plugin.id))
-        rows['verbose_name'].append(str(plugin.verbose_name))
+        rows['verbose_name'].append(mark_safe(f'<a href="{plugin.docs_url}" target="_blank">{plugin.verbose_name}</a>'))
         rows['module'].append(str(plugin.plugin_module))
         rows['source_code'].append(str(plugin.plugin_dir))
         rows['hooks'].append(mark_safe(', '.join(
-            f'<a href="/admin/environment/hooks/{hook.id}/">{hook.id}</a>'
+            f'<a href="{hook.admin_url}">{hook.id}</a>'
             for hook in plugin.hooks
         )))