Commit
migrate to postgres
krystiancha committed Sep 10, 2020
1 parent cc6f62b commit 8f05134
Showing 11 changed files with 509 additions and 0 deletions.
85 changes: 85 additions & 0 deletions alembic.ini
@@ -0,0 +1,85 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql://wotstats@localhost/wotstats


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
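
With this configuration in place, Alembic reads the connection string and script location from alembic.ini. A minimal sketch of applying the migrations programmatically (assuming Alembic is installed and the wotstats database and role already exist; the CLI equivalent is the usual alembic upgrade head):

    from alembic import command
    from alembic.config import Config

    # Load alembic.ini from the repository root; this picks up
    # sqlalchemy.url and script_location defined above.
    alembic_cfg = Config("alembic.ini")

    # Apply all pending revisions up to the latest one.
    command.upgrade(alembic_cfg, "head")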
1 change: 1 addition & 0 deletions alembic/README
@@ -0,0 +1 @@
Generic single-database configuration.
77 changes: 77 additions & 0 deletions alembic/env.py
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)

with context.begin_transaction():
context.run_migrations()


def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(
connection=connection, target_metadata=target_metadata
)

with context.begin_transaction():
context.run_migrations()


if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
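
Note that target_metadata is left as None, so alembic revision --autogenerate cannot diff models against the database; revisions have to be written by hand, as in the initial one below. A hedged sketch, assuming the project later adds a declarative Base in a hypothetical wotstats.models module, of what enabling autogenerate would look like:

    # Hypothetical: only valid once an ORM model module exists.
    # from wotstats.models import Base
    # target_metadata = Base.metadata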
24 changes: 24 additions & 0 deletions alembic/script.py.mako
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
64 changes: 64 additions & 0 deletions alembic/versions/9d1bc39b8198_create_statistics_table.py
@@ -0,0 +1,64 @@
"""create statistics table
Revision ID: 9d1bc39b8198
Revises:
Create Date: 2020-09-10 18:53:24.112358
"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '9d1bc39b8198'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
op.create_table(
"statistics",
sa.Column("account_id", sa.Integer, primary_key=True),
sa.Column("battles_on_stunning_vehicles", sa.Integer),
sa.Column("spotted", sa.Integer),
sa.Column("avg_damage_blocked", sa.Float),
sa.Column("direct_hits_received", sa.Integer),
sa.Column("explosion_hits", sa.Integer),
sa.Column("piercings", sa.Integer),
sa.Column("xp", sa.Integer),
sa.Column("avg_damage_assisted", sa.Float),
sa.Column("dropped_capture_points", sa.Integer),
sa.Column("piercings_received", sa.Integer),
sa.Column("hits_percents", sa.Integer),
sa.Column("draws", sa.Integer),
sa.Column("battles", sa.Integer),
sa.Column("damage_received", sa.Integer),
sa.Column("survived_battles", sa.Integer),
sa.Column("avg_damage_assisted_track", sa.Float),
sa.Column("frags", sa.Integer),
sa.Column("stun_number", sa.Integer),
sa.Column("avg_damage_assisted_radio", sa.Float),
sa.Column("capture_points", sa.Integer),
sa.Column("stun_assisted_damage", sa.Integer),
sa.Column("hits", sa.Integer),
sa.Column("battle_avg_xp", sa.Integer),
sa.Column("wins", sa.Integer),
sa.Column("losses", sa.Integer),
sa.Column("damage_dealt", sa.Integer),
sa.Column("no_damage_direct_hits_received", sa.Integer),
sa.Column("shots", sa.Integer),
sa.Column("explosion_hits_received", sa.Integer),
sa.Column("tanking_factor", sa.Numeric(2)),
sa.Column("trees_cut", sa.Integer),
sa.Column("last_battle_time", sa.DateTime(timezone=True)),
sa.Column("updated_at", sa.DateTime(timezone=True), primary_key=True),
sa.Column("global_rating", sa.Integer),
sa.Column("clan_id", sa.Integer),
sa.Column("nickname", sa.String),
sa.Column("logout_at", sa.DateTime(timezone=True)),
)


def downgrade():
op.drop_table("statistics")
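
The table keys rows on (account_id, updated_at), so each fetch from the API can be stored as a separate snapshot of an account's statistics. A minimal sketch, not part of the commit, of inserting one row with SQLAlchemy Core (assuming SQLAlchemy 1.4+ and a snapshot dict shaped like the output of wotstats.api.parse):

    from sqlalchemy import MetaData, Table, create_engine

    # Same connection URL as in alembic.ini.
    engine = create_engine("postgresql://wotstats@localhost/wotstats")

    # Reflect the statistics table created by this migration.
    statistics = Table("statistics", MetaData(), autoload_with=engine)

    def store_snapshot(account_id: int, snapshot: dict) -> None:
        # One transactional insert per snapshot; the composite primary key
        # (account_id, updated_at) keeps successive snapshots distinct.
        with engine.begin() as conn:
            conn.execute(statistics.insert().values(account_id=account_id, **snapshot))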
Empty file added wotstats/__init__.py
Empty file.
104 changes: 104 additions & 0 deletions wotstats/api.py
@@ -0,0 +1,104 @@
import json
from collections.abc import MutableMapping
from datetime import datetime
from enum import Enum
from typing import Sequence
from urllib.parse import urljoin
from urllib.request import urlopen

FIELDS = [
"last_battle_time",
"updated_at",
"global_rating",
"clan_id",
"statistics.trees_cut",
"statistics.random.spotted",
"statistics.random.battles_on_stunning_vehicles",
"statistics.random.avg_damage_blocked",
"statistics.random.capture_points",
"statistics.random.explosion_hits",
"statistics.random.piercings",
"statistics.random.xp",
"statistics.random.avg_damage_assisted",
"statistics.random.dropped_capture_points",
"statistics.random.damage_dealt",
"statistics.random.hits_percents",
"statistics.random.draws",
"statistics.random.tanking_factor",
"statistics.random.battles",
"statistics.random.damage_received",
"statistics.random.survived_battles",
"statistics.random.frags",
"statistics.random.stun_number",
"statistics.random.avg_damage_assisted_radio",
"statistics.random.direct_hits_received",
"statistics.random.stun_assisted_damage",
"statistics.random.hits",
"statistics.random.battle_avg_xp",
"statistics.random.wins",
"statistics.random.losses",
"statistics.random.piercings_received",
"statistics.random.no_damage_direct_hits_received",
"statistics.random.shots",
"statistics.random.explosion_hits_received",
"statistics.random.avg_damage_assisted_track",
"nickname",
"logout_at",
]

EXTRA = ["statistics.random"]

TIME_FIELDS = ["last_battle_time", "updated_at", "logout_at"]


class Realm(Enum):
RU = "https://api.worldoftanks.ru/wot/"
EU = "https://api.worldoftanks.eu/wot/"
NA = "https://api.worldoftanks.com/wot/"
ASIA = "https://api.worldoftanks.asia/wot/"


def flatten(d, parent_key="", sep="_"):
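    """Flatten a nested mapping into a single-level dict, joining keys with sep."""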
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(flatten(v, new_key, sep=sep).items())
else:
items.append((new_key, v))
return dict(items)


def parse(body):
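    """Decode an account/info response, converting timestamps and flattening nested statistics."""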
raw = json.load(body)

for account, data in raw["data"].items():
for field in TIME_FIELDS:
data[field] = datetime.utcfromtimestamp(data[field])

return {
account: {
key.replace("statistics.", "").replace("random.", ""): value
for key, value in flatten(data, sep=".").items()
}
for account, data in raw["data"].items()
}


def account_info(realm: Realm, application_id: str, account_ids: Sequence[str]):
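    """Fetch and parse statistics for the given account IDs from the Wargaming API."""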
data = "&".join(
[
f"{key}={value}"
for key, value in {
"application_id": application_id,
"account_id": ",".join(account_ids),
"fields": ",".join(FIELDS),
"extra": ",".join(EXTRA),
}.items()
]
)
with urlopen(
url=urljoin(realm.value, "account/info/"),
data=data.encode(),
) as f:
return parse(f)
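
A short usage sketch (the application ID and account ID below are placeholders; a real Wargaming.net application ID is required):

    from wotstats.api import Realm, account_info

    stats = account_info(Realm.EU, "your_application_id", ["123456789"])
    for account_id, snapshot in stats.items():
        # Each snapshot is a flat dict keyed by the column names used in the
        # statistics table, e.g. nickname, battles, wins, last_battle_time.
        print(account_id, snapshot["nickname"], snapshot["battles"])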