mirror of https://github.com/ArchiveBox/ArchiveBox.git (synced 2025-05-15 15:44:26 -04:00)
feat: Remove walrus operator (we still need to support python3.7)
commit 8d22ebf988
parent 8aee5c408a
3 changed files with 8 additions and 2 deletions
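The rewrite itself is mechanical: the walrus operator (:=) assigns a name inside an expression and only exists on Python 3.8+, so each occurrence is split into a plain assignment followed by the same truthiness check. A minimal sketch of the pattern, with hypothetical lookup()/handle() helpers standing in for the ORM calls changed in the hunks below:

# hypothetical stand-ins for the lookups rewritten in this commit
def lookup(key):
    return {'a': 1}.get(key)   # returns None on a miss, like QuerySet.first()

def handle(result):
    print(result)

# Python 3.8+ only (what the codebase had):
#   if result := lookup('a'):
#       handle(result)

# Python 3.7-compatible equivalent (what this commit switches to):
result = lookup('a')
if result:
    handle(result)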
@@ -60,6 +60,7 @@ class Tag(models.Model):
         else:
             return super().save(*args, **kwargs)


 class Snapshot(models.Model):
     id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

@@ -158,6 +159,7 @@ class Snapshot(models.Model):
         self.tags.clear()
         self.tags.add(*tags_id)


 class ArchiveResultManager(models.Manager):
     def indexable(self, sorted: bool = True):
         INDEXABLE_METHODS = [ r[0] for r in ARCHIVE_METHODS_INDEXING_PRECEDENCE ]
@@ -167,6 +169,8 @@ class ArchiveResultManager(models.Manager):
             precedence = [ When(extractor=method, then=Value(precedence)) for method, precedence in ARCHIVE_METHODS_INDEXING_PRECEDENCE ]
             qs = qs.annotate(indexing_precedence=Case(*precedence, default=Value(1000),output_field=IntegerField())).order_by('indexing_precedence')
         return qs


 class ArchiveResult(models.Model):
     snapshot = models.ForeignKey(Snapshot, on_delete=models.CASCADE)
     cmd = models.JSONField()
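The indexable() manager shown above builds its ordering with a conditional annotation instead of Python-side sorting. A sketch of that Django Case/When pattern, assuming a configured Django app, a simplified Result model, and a made-up precedence table (ArchiveBox's real table is ARCHIVE_METHODS_INDEXING_PRECEDENCE and the real model is ArchiveResult):

# assumes a configured Django project; model and precedence values are illustrative
from django.db import models
from django.db.models import Case, When, Value, IntegerField

# hypothetical stand-in for ARCHIVE_METHODS_INDEXING_PRECEDENCE
METHOD_PRECEDENCE = [('readability', 1), ('singlefile', 2), ('wget', 3)]

class Result(models.Model):          # simplified stand-in for ArchiveResult
    extractor = models.CharField(max_length=32)
    status = models.CharField(max_length=32)

    class Meta:
        app_label = 'example'        # hypothetical app label

def indexable_results():
    # keep only succeeded results from extractors that have a precedence entry
    methods = [m for m, _ in METHOD_PRECEDENCE]
    qs = Result.objects.filter(extractor__in=methods, status='succeeded')
    # translate the precedence table into SQL CASE WHEN ... THEN ... branches
    branches = [When(extractor=m, then=Value(p)) for m, p in METHOD_PRECEDENCE]
    # unknown extractors sort last via the default value of 1000
    return qs.annotate(
        indexing_precedence=Case(*branches, default=Value(1000), output_field=IntegerField())
    ).order_by('indexing_precedence')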
@@ -95,7 +95,8 @@ def index_links(links: Union[List[Link],None], out_dir: Path=OUTPUT_DIR):
     from core.models import Snapshot, ArchiveResult

     for link in links:
-        if snap := Snapshot.objects.filter(url=link.url).first():
+        snap = Snapshot.objects.filter(url=link.url).first()
+        if snap:
             results = ArchiveResult.objects.indexable().filter(snapshot=snap)
             log_index_started(link.url)
             try:
@@ -38,7 +38,8 @@ def search(text: str) -> List[str]:
     file_paths = [p.decode() for p in rg.stdout.splitlines()]
     timestamps = set()
     for path in file_paths:
-        if ts := ts_regex.findall(path):
+        ts = ts_regex.findall(path)
+        if ts:
             timestamps.add(ts[0])

     snap_ids = [str(id) for id in Snapshot.objects.filter(timestamp__in=timestamps).values_list('pk', flat=True)]
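In the search backend the behaviour is unchanged because re.findall() returns an empty, falsy list when a path carries no timestamp, so the split assignment tests exactly the same condition the walrus form did. A standalone sketch of that extraction step, with a made-up TS_REGEX and example paths (ArchiveBox's real ts_regex and on-disk layout are defined elsewhere):

import re
from typing import List

# hypothetical pattern: archive output lives under archive/<unix-timestamp>/...
TS_REGEX = re.compile(r'archive/(\d+\.\d+)/')

def extract_timestamps(file_paths: List[str]) -> List[str]:
    timestamps = set()
    for path in file_paths:
        ts = TS_REGEX.findall(path)   # [] when the path has no timestamp component
        if ts:                        # same truthiness test the commit switches to
            timestamps.add(ts[0])
    return sorted(timestamps)

# example paths, roughly as ripgrep might print them (illustrative only)
print(extract_timestamps([
    'archive/1611234567.0/output.html',
    'archive/1611234567.0/readability/content.txt',
    'archive/1629999999.5/singlefile.html',
    'sources/import.txt',
]))
# -> ['1611234567.0', '1629999999.5']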