Compare commits
No commits in common. "76c716cea668035f0e43157128232ad13812710c" and "0a48fba5c02ed835801eb45814554571f5e3a7ed" have entirely different histories.
76c716cea6 ... 0a48fba5c0
.gitignore (vendored): 1 line changed
@@ -1,7 +1,6 @@
 .env
 venv
 volumes/redis/data
-volumes/postgres/pgdata
 __pycache__
 *.db
 *.pyc
alembic.ini: 119 lines changed
@@ -1,119 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-# Use forward slashes (/) also on windows to provide an os agnostic path
-script_location = alembic
-
-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
-# for all available tokens
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to ZoneInfo()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to alembic/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-# version_path_separator = newline
-#
-# Use os.pathsep. Default configuration used for new projects.
-version_path_separator = os
-
-# set to 'true' to search source files recursively
-# in each "version_locations" directory
-# new in Alembic version 1.10
-# recursive_version_locations = false
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = driver://user:pass@localhost/dbname
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
-# hooks = ruff
-# ruff.type = exec
-# ruff.executable = %(here)s/.venv/bin/ruff
-# ruff.options = check --fix REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARNING
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARNING
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
@@ -1 +0,0 @@
-Generic single-database configuration.
alembic/env.py: 108 lines changed
@@ -1,108 +0,0 @@
-from pathlib import Path
-import asyncio
-import sys
-import os
-from logging.config import fileConfig
-
-from sqlalchemy import pool
-from sqlalchemy.engine import Connection
-from sqlalchemy.ext.asyncio import async_engine_from_config
-from dotenv import load_dotenv
-from alembic import context
-
-from app.core.models import BaseDBModel
-
-sys.path.append(str(Path(__file__).resolve().parent.parent) + "/app")
-
-load_dotenv(".local.env")
-load_dotenv()
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
-    fileConfig(config.config_file_name)
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = BaseDBModel.metadata
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-section = config.get_section(config.config_ini_section, {})
-match section.get("sqlalchemy.url"):
-    case "driver://user:pass@localhost/dbname":
-        url_db = os.getenv("DATABASE_URL_LOCAL_MIGRATE")
-        config.set_section_option(
-            config.config_ini_section,
-            "sqlalchemy.url",
-            url_db,
-        )
-
-
-def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    url = config.get_main_option("sqlalchemy.url")
-
-    context.configure(
-        url=url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def do_run_migrations(connection: Connection) -> None:
-    context.configure(connection=connection, target_metadata=target_metadata)
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-async def run_async_migrations() -> None:
-    """In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    connectable = async_engine_from_config(
-        config.get_section(config.config_ini_section, {}),
-        prefix="sqlalchemy.",
-        poolclass=pool.NullPool,
-    )
-
-    async with connectable.connect() as connection:
-        await connection.run_sync(do_run_migrations)
-
-    await connectable.dispose()
-
-
-def run_migrations_online() -> None:
-    """Run migrations in 'online' mode."""
-    asyncio.run(run_async_migrations())
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
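Note on the env.py removed above: when alembic.ini still carries the placeholder driver://user:pass@localhost/dbname, the match block substitutes the value of the DATABASE_URL_LOCAL_MIGRATE environment variable before the engine is built. A minimal sketch of driving that path programmatically follows; the URL is illustrative only and not taken from the repository, and it assumes alembic and the app package are importable from the project root.

# hedged sketch: programmatic equivalent of "alembic upgrade head" against the env.py above
import os

from alembic import command
from alembic.config import Config

os.environ.setdefault(
    "DATABASE_URL_LOCAL_MIGRATE",
    "postgresql+asyncpg://admin:admin@localhost:5432/postgresdb",  # illustrative URL only
)
command.upgrade(Config("alembic.ini"), "head")  # env.py swaps in the URL, then runs the migrations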
@@ -1,28 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision: str = ${repr(up_revision)}
-down_revision: Union[str, None] = ${repr(down_revision)}
-branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
-depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    ${downgrades if downgrades else "pass"}
@@ -1,43 +0,0 @@
-"""Added user table
-
-Revision ID: 6da8bfcff69e
-Revises:
-Create Date: 2025-03-29 17:52:31.095177
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = '6da8bfcff69e'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    """Upgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('users',
-    sa.Column('username', sa.String(), nullable=False),
-    sa.Column('telegram_id', sa.String(), nullable=False),
-    sa.Column('chat_id', sa.String(), nullable=False),
-    sa.Column('is_active', sa.Boolean(), nullable=False),
-    sa.Column('is_admin', sa.Boolean(), nullable=False),
-    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
-    sa.Column('created_at', sa.DateTime(), nullable=True),
-    sa.Column('updated_at', sa.DateTime(), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('id')
-    )
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    """Downgrade schema."""
-    # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('users')
-    # ### end Alembic commands ###
@@ -1,116 +0,0 @@
-import datetime
-
-from typing import Optional
-
-from sqlalchemy.future import select
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.orm import declarative_base
-from sqlalchemy import (
-    func,
-    Column,
-    DateTime,
-    Integer,
-    String,
-    Boolean,
-)
-from sqlalchemy import exc as sqlalchemy_exc
-from app.core.sessions import get_session
-
-from loguru import logger
-
-
-Base = declarative_base()
-
-
-class BaseDBModel(Base):
-    __abstract__ = True
-
-    id = Column(
-        Integer,
-        nullable=False,
-        unique=True,
-        primary_key=True,
-        autoincrement=True,
-    )
-
-    def __repr__(self):
-        return "<{0.__class__.__name__}(id={0.id!r})>".format(self)
-
-
-class BaseCreatedUpdatedAtModel:
-    created_at = Column(DateTime, default=datetime.datetime.now(datetime.timezone.utc))
-    updated_at = Column(
-        DateTime,
-        default=func.now(),
-        onupdate=func.current_timestamp(),
-    )
-
-
-class User(BaseDBModel, BaseCreatedUpdatedAtModel):
-    __tablename__: str = "users"
-
-    username: str = Column(String, nullable=False)
-    telegram_id: str = Column(String, nullable=False)
-    chat_id: str = Column(String, nullable=False)
-    is_active: bool = Column(Boolean, nullable=False, default=True)
-    is_admin: bool = Column(Boolean, nullable=False, default=False)
-
-    async def add_user(
-        username: str,
-        telegram_id: str | int,
-        chat_id: str | int,
-        is_active: bool = True,
-        is_admin: bool = False
-    ) -> Optional["User"]:
-        db_session: AsyncSession = await get_session()
-        async with db_session as session:
-            try:
-                new_user = User(
-                    username=username,
-                    telegram_id=str(telegram_id),
-                    chat_id=str(chat_id),
-                    is_active=is_active,
-                    is_admin=is_admin,
-                )
-                session.add(new_user)
-                await session.commit()
-                return new_user
-            except sqlalchemy_exc.SQLAlchemyError as err:
-                logger.warning(err)
-                await session.rollback()
-            finally:
-                await session.close()
-
-    async def get_user_using_user_id(user_id: str | int) -> Optional["User"]:
-        db_session: AsyncSession = await get_session()
-        if isinstance(user_id, str):
-            user_id: int = int(user_id)
-
-        async with db_session as session:
-            try:
-                stmt = select(User).where(User.id == user_id)
-                user: Optional[User] = (
-                    (await session.execute(stmt)).unique().scalar_one_or_none()
-                )
-                return user
-            except sqlalchemy_exc.SQLAlchemyError as err:
-                logger.warning(err)
-            finally:
-                await session.close()
-
-    async def get_user_using_telegram_id(telegram_id: str | int) -> Optional["User"]:
-        db_session: AsyncSession = await get_session()
-        if isinstance(telegram_id, int):
-            telegram_id: str = str(telegram_id)
-
-        async with db_session as session:
-            try:
-                stmt = select(User).filter(User.telegram_id == telegram_id)
-                user: Optional[User] = (
-                    (await session.execute(stmt)).unique().scalar_one_or_none()
-                )
-                return user
-            except sqlalchemy_exc.SQLAlchemyError as err:
-                logger.warning(err)
-            finally:
-                await session.close()
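The file removed above defines the users table plus async helpers reachable as User.add_user(...) and User.get_user_using_telegram_id(...), each opening its own session per call. A short usage sketch follows, under the assumption that the app package is importable and a database is reachable; all concrete values are illustrative.

# hedged sketch: calling the removed helpers from an async context
import asyncio

from app.core.models import User


async def demo() -> None:
    # add_user commits a new row and returns it, or None if the insert failed
    user = await User.add_user(username="alice", telegram_id=123456, chat_id=123456)
    print(user)

    # look the same row up again by Telegram id
    found = await User.get_user_using_telegram_id(123456)
    print(found)


asyncio.run(demo())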
@@ -1,18 +0,0 @@
-from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
-from sqlalchemy.orm import sessionmaker
-
-from loader import SECRETS
-
-
-engine = create_async_engine(SECRETS.postgres_url, max_overflow=-1)
-async_session = sessionmaker(engine, expire_on_commit=False, class_=AsyncSession)
-
-
-async def get_session() -> AsyncSession:
-    """Получение сессии БД
-
-    Returns:
-        AsyncSession
-    """
-    async with async_session() as session:
-        return session
@@ -24,7 +24,6 @@ def get_logger(filename: str = "/volumes/app/main.log", level: str = "INFO") ->
 class Secrets:
     bot_token: str = os.getenv("bot_token")
     redis_url: str = "redis://redis_telegram_bot:6379"
-    postgres_url: str = "postgresql+asyncpg://admin:admin@postgres_telegram_bot:5433/postgresdb"
 
 
 SECRETS = Secrets()
@@ -1,102 +0,0 @@
-from typing import Union, Callable, Any, Awaitable, Optional
-
-from aiogram.types import Message, CallbackQuery, Update
-from aiogram import BaseMiddleware
-from aiogram import exceptions as aiogram_exc
-
-from core.models import User
-from loader import custom_logger
-
-
-class GetUserMiddleware(BaseMiddleware):
-    user: User
-
-    def __init__(self):
-        super(GetUserMiddleware, self).__init__()
-
-    async def __call__(
-        self,
-        handler: Callable[[Message, dict[str, Any]], Awaitable[Any]],
-        event: Union[Message, CallbackQuery],
-        data: dict[str, Any],
-    ) -> Any:
-
-        try:
-            await self.on_process_event(event, data)
-        except CancelHandler:
-            return
-        except UsernameNotFound:
-            return
-        except aiogram_exc.TelegramBadRequest as err:
-            custom_logger.warning(err)
-            if isinstance(event, CallbackQuery):
-                return event.answer()
-        except aiogram_exc.TelegramForbiddenError as err:
-            custom_logger.warning(err)
-
-        data["user"] = self.user
-        try:
-            result = await handler(event, data)
-            return result
-        except aiogram_exc.TelegramBadRequest as err:
-            custom_logger.warning(err)
-            if isinstance(event, CallbackQuery):
-                return event.answer()
-
-    @classmethod
-    def get_update_users_id(cls, update: Update):
-        if update.callback_query:
-            return update.callback_query.from_user.id
-        elif update.message:
-            return update.message.from_user.id
-        elif update.edited_message:
-            return update.edited_message.from_user.id
-
-    async def set_user(self, target: Union[CallbackQuery, Message]):
-        telegram_id: str = str(target.from_user.id)
-        username: str = target.from_user.username
-
-        if not username:
-            if isinstance(target, CallbackQuery):
-                await target.message.answer(
-                    "<b>❗️ Для корректной работы бота требуется установить @username</b>"
-                )
-            elif isinstance(target, Message):
-                await target.answer(
-                    "<b>❗️ Для корректной работы бота требуется установить @username</b>"
-                )
-            raise UsernameNotFound()
-
-        self.user: User = await User.get_user_using_telegram_id(
-            telegram_id=telegram_id
-        )
-        if isinstance(self.user, None):
-            if isinstance(target, CallbackQuery):
-                chat_id: str = target.message.chat.id
-            else:
-                chat_id: str = target.chat.id
-            self.user = await User.add_user(
-                telegram_id=telegram_id,
-                username=username,
-                chat_id=chat_id,
-                status=0,
-            )
-            return
-        elif isinstance(self.user, User):
-            if self.user.is_active == False:
-                custom_logger.debug(f"User ban: {self.user.is_active}")
-                raise CancelHandler()
-
-    async def on_process_event(self, message: Message, _):
-        try:
-            await self.set_user(message)
-        except AttributeError:
-            pass
-
-
-class CancelHandler(Exception):
-    pass
-
-
-class UsernameNotFound(Exception):
-    pass
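The middleware removed above looks the Telegram user up (creating a row on first contact) and injects it into the handler data before the handler runs. In aiogram 3 it would typically be attached to the dispatcher roughly as sketched below; the import path for GetUserMiddleware is an assumption, since the file's location is not shown in this diff.

# hedged sketch: registering the removed middleware on an aiogram 3 Dispatcher
from aiogram import Dispatcher

from middlewares import GetUserMiddleware  # assumed module path

dp = Dispatcher()
dp.message.middleware(GetUserMiddleware())
dp.callback_query.middleware(GetUserMiddleware())

# handlers can then accept the injected object:
# async def cmd_start(message: Message, user: User) -> None: ...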
@@ -14,22 +14,6 @@ services:
     restart: always
     command: "redis-server --appendonly yes --replica-read-only no"
-
-  postgres_telegram_bot:
-    image: postgres:12-alpine
-    container_name: postgres_telegram_bot
-    ports:
-      - 5432:5432
-    volumes:
-      - ./volumes/postgres/pgdata:/var/lib/postgresql/data/pgdata
-    env_file:
-      - .env
-    restart: always
-    deploy:
-      resources:
-        limits:
-          cpus: '2'
-          memory: '1g'
 
   telegram_bot:
     image: telegram_bot
     container_name: telegram_bot
@@ -41,10 +25,8 @@ services:
     working_dir: /app
     depends_on:
       - redis_telegram_bot
-      - postgres_telegram_bot
     links:
       - redis_telegram_bot
-      - postgres_telegram_bot
     volumes:
       - ./app:/app
       - ./volumes/app:/volumes/app
requirements.txt (BIN)
Binary file not shown.