Skip to content

Commit

Permalink
Merge pull request #105 from croque-scp/always-log-end-time
Browse files Browse the repository at this point in the history
Always log end time
  • Loading branch information
rossjrw authored May 9, 2024
2 parents b99bec0 + 54f2ed7 commit e28aa93
Show file tree
Hide file tree
Showing 4 changed files with 150 additions and 109 deletions.
165 changes: 92 additions & 73 deletions notifier/notify.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from contextlib import contextmanager
import logging
import re
from smtplib import SMTPAuthenticationError
from typing import FrozenSet, Iterable, List, Optional, Set, Tuple
from typing import FrozenSet, Iterable, Iterator, List, Optional, Set, Tuple

from notifier.config.remote import get_global_config
from notifier.config.user import get_user_config
Expand All @@ -15,7 +16,7 @@
from notifier.dumps import LogDumpCacher, record_activation_log
from notifier.emailer import Emailer
from notifier.newposts import get_new_posts
from notifier.timing import channel_is_now, channel_will_be_next, timestamp
from notifier.timing import channel_is_now, timestamp
from notifier.types import (
ActivationLogDump,
AuthConfig,
Expand Down Expand Up @@ -70,6 +71,28 @@ def pick_channels_to_notify(
return channels


@contextmanager
def activation_log_dump_context(
    config: LocalConfig, database: BaseDatabaseDriver, dry_run: bool
) -> Iterator[LogDumpCacher]:
    """Create an activation log dump and guarantee it is finalised.

    Yields a LogDumpCacher seeded with the activation start timestamp.
    Whether the wrapped process succeeds or raises, the ``finally``
    block records the end timestamp, and — unless this is a dry run —
    uploads the accumulated log dumps via ``record_activation_log``.

    :param config: Local notifier configuration, forwarded to the
        log-dump upload step.
    :param database: Database driver whose ``store_activation_log_dump``
        persists the cached dump.
    :param dry_run: When True, skips the final upload (the LogDumpCacher
        is also constructed in dry-run mode).
    """
    activation_log_dump = LogDumpCacher[ActivationLogDump](
        {"start_timestamp": timestamp()},
        database.store_activation_log_dump,
        dry_run,
    )
    try:
        yield activation_log_dump

    finally:
        # Even if the run failed, record the end timestamp and upload if possible
        activation_log_dump.update({"end_timestamp": timestamp()})

        if not dry_run:
            logger.info("Uploading log dumps...")
            record_activation_log(config, database)


def notify(
*,
config: LocalConfig,
Expand All @@ -87,87 +110,83 @@ def notify(
getting data for new posts) and then triggers the relevant notification
schedules.
"""
activation_log_dump = LogDumpCacher[ActivationLogDump](
{"start_timestamp": timestamp()},
database.store_activation_log_dump,
dry_run,
)

# If there are no active channels, which shouldn't happen, there is
# nothing to do
if len(active_channels) == 0:
logger.warning("No active channels; aborting")
return

connection = Connection(database.get_supported_wikis(), dry_run=dry_run)

activation_log_dump.update({"config_start_timestamp": timestamp()})
if dry_run:
logger.info("Dry run: skipping remote config acquisition")
else:
logger.info("Getting remote config...")
get_global_config(config, database, connection)
logger.info("Getting user config...")
get_user_config(config, database, connection)

# Refresh the connection to add any newly-configured wikis
connection = Connection(database.get_supported_wikis())
activation_log_dump.update({"config_end_timestamp": timestamp()})

activation_log_dump.update({"getpost_start_timestamp": timestamp()})
if dry_run:
logger.info("Dry run: skipping new post acquisition")
else:
logger.info("Getting new posts...")
get_new_posts(database, connection, limit_wikis)
# The timestamp immediately after downloading posts will be used as the
# upper bound of posts to notify users about
activation_log_dump.update({"getpost_end_timestamp": timestamp()})
with activation_log_dump_context(
config, database, dry_run
) as activation_log_dump:
# If there are no active channels, which shouldn't happen, there is
# nothing to do
if len(active_channels) == 0:
logger.warning("No active channels; aborting")
return

if dry_run:
logger.info("Dry run: skipping Wikidot login")
else:
connection.login(config["wikidot_username"], auth["wikidot_password"])

activation_log_dump.update({"notify_start_timestamp": timestamp()})
logger.info("Notifying...")
notify_active_channels(
active_channels,
current_timestamp=activation_log_dump.data.get(
"getpost_end_timestamp", timestamp()
),
config=config,
auth=auth,
database=database,
connection=connection,
force_initial_search_timestamp=force_initial_search_timestamp,
dry_run=dry_run,
)
activation_log_dump.update({"notify_end_timestamp": timestamp()})
connection = Connection(
database.get_supported_wikis(), dry_run=dry_run
)

# Notifications have been sent, so perform time-insensitive maintenance
activation_log_dump.update({"config_start_timestamp": timestamp()})
if dry_run:
logger.info("Dry run: skipping remote config acquisition")
else:
logger.info("Getting remote config...")
get_global_config(config, database, connection)
logger.info("Getting user config...")
get_user_config(config, database, connection)

# Refresh the connection to add any newly-configured wikis
connection = Connection(database.get_supported_wikis())
activation_log_dump.update({"config_end_timestamp": timestamp()})

activation_log_dump.update({"getpost_start_timestamp": timestamp()})
if dry_run:
logger.info("Dry run: skipping new post acquisition")
else:
logger.info("Getting new posts...")
get_new_posts(database, connection, limit_wikis)
# The timestamp immediately after downloading posts will be used as the
# upper bound of posts to notify users about
activation_log_dump.update({"getpost_end_timestamp": timestamp()})

if dry_run:
logger.info("Dry run: skipping Wikidot login")
else:
connection.login(
config["wikidot_username"], auth["wikidot_password"]
)

if dry_run:
logger.info("Dry run: skipping cleanup")
return
activation_log_dump.update({"notify_start_timestamp": timestamp()})
logger.info("Notifying...")
notify_active_channels(
active_channels,
current_timestamp=activation_log_dump.data.get(
"getpost_end_timestamp", timestamp()
),
config=config,
auth=auth,
database=database,
connection=connection,
force_initial_search_timestamp=force_initial_search_timestamp,
dry_run=dry_run,
)
activation_log_dump.update({"notify_end_timestamp": timestamp()})

logger.info("Cleaning up...")
# Notifications have been sent, so perform time-insensitive maintenance

logger.info("Removing non-notifiable posts...")
database.delete_non_notifiable_posts()
if dry_run:
logger.info("Dry run: skipping cleanup")
return

logger.info("Checking for deleted posts")
clear_deleted_posts(database, connection)
logger.info("Cleaning up...")

logger.info("Purging invalid user config pages...")
delete_prepared_invalid_user_pages(config, connection)
rename_invalid_user_config_pages(config, connection)
logger.info("Removing non-notifiable posts...")
database.delete_non_notifiable_posts()

activation_log_dump.update({"end_timestamp": timestamp()})
logger.info("Checking for deleted posts")
clear_deleted_posts(database, connection)

assert not dry_run
logger.info("Uploading log dumps...")
record_activation_log(config, database)
logger.info("Purging invalid user config pages...")
delete_prepared_invalid_user_pages(config, connection)
rename_invalid_user_config_pages(config, connection)


def notify_active_channels(
Expand Down
77 changes: 42 additions & 35 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ black = "^22.3.0"
pytest = "^6.2.4"
isort = "^5.9.2"
pylint = "^2.9.6"
mypy = "^0.910"
mypy = "^1.10.0"
boto3-stubs = {extras = ["essential"], version = "^1.28.2"}
types-beautifulsoup4 = "^4.12.0.5"
types-requests = "^2.31.0.1"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
{
"activations": [
{
"start_timestamp": 1691179249,
"config_start_timestamp": 1691179249,
"config_end_timestamp": null,
"getpost_start_timestamp": null,
"getpost_end_timestamp": null,
"notify_start_timestamp": null,
"notify_end_timestamp": null,
"end_timestamp": 1691179281
}
],
"channels": []
}

0 comments on commit e28aa93

Please sign in to comment.