tg_bot_language/bot/handlers/import_text.py
mamonov.ep 99deaafcbf feat: JLPT levels for Japanese, custom practice scenarios, UI improvements
- Add separate level systems: CEFR (A1-C2) for European languages, JLPT (N5-N1) for Japanese
- Store levels per language in new `levels_by_language` JSON field
- Add custom scenario option in AI practice mode
- Show action buttons after practice ends (new dialogue, tasks, words)
- Fix level display across all handlers to use correct level system
- Add Alembic migration for levels_by_language field
- Update all locale files (ru, en, ja) with new keys

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-05 14:30:24 +03:00
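
The handler below resolves the per-language level described in this commit through `get_user_level_for_language`, imported from `utils.levels`. That module is not shown on this page, so the following is only a rough sketch of how the lookup might work; the field layout, default levels, and fallback rules here are assumptions, not the project's actual code.

# Hypothetical sketch -- utils/levels.py is not part of this page and the real
# implementation may differ. Assumes user.levels_by_language is a dict such as
# {"en": "B1", "ja": "N4"} and that Japanese uses the JLPT scale.
_DEFAULT_LEVELS = {"ja": "N5"}  # assumed JLPT fallback; CEFR languages fall back to A1

def get_user_level_for_language(user) -> str:
    """Return the user's level for the language they are currently learning."""
    learning = user.learning_language or "en"
    levels = user.levels_by_language or {}
    return levels.get(learning) or _DEFAULT_LEVELS.get(learning, "A1")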

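"""Handlers for importing vocabulary from a pasted text.

The user sends a passage of 50-3000 characters, the AI service extracts up to
15 words at the user's current level, and the extracted words can then be
added to the vocabulary one by one or all at once.
"""
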
from aiogram import Router, F
from aiogram.filters import Command
from aiogram.types import Message, InlineKeyboardMarkup, InlineKeyboardButton, CallbackQuery
from aiogram.fsm.context import FSMContext
from aiogram.fsm.state import State, StatesGroup

from database.db import async_session_maker
from database.models import WordSource
from services.user_service import UserService
from services.vocabulary_service import VocabularyService
from services.ai_service import ai_service
from utils.i18n import t, get_user_lang
from utils.levels import get_user_level_for_language

router = Router()


class ImportStates(StatesGroup):
    """States for importing words from a text"""
    waiting_for_text = State()
    viewing_words = State()

@router.message(Command("import"))
async def cmd_import(message: Message, state: FSMContext):
"""Обработчик команды /import"""
async with async_session_maker() as session:
user = await UserService.get_user_by_telegram_id(session, message.from_user.id)
if not user:
await message.answer(t('ru', 'common.start_first'))
return
await state.set_state(ImportStates.waiting_for_text)
lang = user.language_interface or 'ru'
await message.answer(
t(lang, 'import.title') + "\n\n" +
t(lang, 'import.desc') + "\n\n" +
t(lang, 'import.can_send') + "\n\n" +
t(lang, 'import.cancel_hint')
)
@router.message(Command("cancel"), ImportStates.waiting_for_text)
async def cancel_import(message: Message, state: FSMContext):
"""Отмена импорта"""
await state.clear()
async with async_session_maker() as session:
user = await UserService.get_user_by_telegram_id(session, message.from_user.id)
lang = get_user_lang(user)
await message.answer(t(lang, 'import_extra.cancelled'))
@router.message(ImportStates.waiting_for_text)
async def process_text(message: Message, state: FSMContext):
    """Process the text sent by the user"""
    # message.text is None for non-text messages (photos, stickers, ...)
    text = (message.text or "").strip()

    async with async_session_maker() as session:
        user = await UserService.get_user_by_telegram_id(session, message.from_user.id)
        lang = get_user_lang(user)

    if len(text) < 50:
        await message.answer(t(lang, 'import.too_short'))
        return
    if len(text) > 3000:
        await message.answer(t(lang, 'import.too_long'))
        return

    async with async_session_maker() as session:
        user = await UserService.get_user_by_telegram_id(session, message.from_user.id)

        # Show a processing indicator
        processing_msg = await message.answer(t(user.language_interface or 'ru', 'import.processing'))

        # Extract words via AI
        current_level = get_user_level_for_language(user)
        words = await ai_service.extract_words_from_text(
            text=text,
            level=current_level,
            max_words=15,
            learning_lang=user.learning_language,
            translation_lang=user.language_interface,
        )

        await processing_msg.delete()

        if not words:
            await message.answer(t(user.language_interface or 'ru', 'import.failed'))
            await state.clear()
            return

        # Keep the extracted data in the FSM state
        await state.update_data(
            words=words,
            user_id=user.id,
            original_text=text,
            level=current_level
        )
        await state.set_state(ImportStates.viewing_words)

    # Show the extracted words
    await show_extracted_words(message, words)

async def show_extracted_words(message: Message, words: list):
    """Show the extracted words with buttons for adding them"""
    async with async_session_maker() as session:
        user = await UserService.get_user_by_telegram_id(session, message.from_user.id)
        lang = (user.language_interface if user else 'ru') or 'ru'

    text = t(lang, 'import.found_header', n=len(words)) + "\n\n"
    for idx, word_data in enumerate(words, 1):
        text += (
            f"{idx}. <b>{word_data['word']}</b> "
            f"[{word_data.get('transcription', '')}]\n"
            f" {word_data['translation']}\n"
        )
        if word_data.get('context'):
            # Truncate the context if it is too long
            context = word_data['context']
            if len(context) > 80:
                context = context[:77] + "..."
            text += f" <i>«{context}»</i>\n"
        text += "\n"
    text += t(lang, 'words.choose')

    # Build a button for each word (two per row)
    keyboard = []
    for idx, word_data in enumerate(words):
        button = InlineKeyboardButton(
            text=f" {word_data['word']}",
            callback_data=f"import_word_{idx}"
        )
        # Place buttons two per row
        if len(keyboard) == 0 or len(keyboard[-1]) == 2:
            keyboard.append([button])
        else:
            keyboard[-1].append(button)

    # "Add all" button
    keyboard.append([
        InlineKeyboardButton(text=t(lang, 'words.add_all_btn'), callback_data="import_all_words")
    ])
    # "Close" button
    keyboard.append([
        InlineKeyboardButton(text=t(lang, 'words.close_btn'), callback_data="close_import")
    ])

    reply_markup = InlineKeyboardMarkup(inline_keyboard=keyboard)
    await message.answer(text, reply_markup=reply_markup)

@router.callback_query(F.data.startswith("import_word_"), ImportStates.viewing_words)
async def import_single_word(callback: CallbackQuery, state: FSMContext):
"""Добавить одно слово из импорта"""
# Отвечаем сразу, операция может занять время
await callback.answer()
word_index = int(callback.data.split("_")[2])
data = await state.get_data()
words = data.get('words', [])
user_id = data.get('user_id')
if word_index >= len(words):
await callback.answer("❌ Ошибка: слово не найдено")
return
word_data = words[word_index]
async with async_session_maker() as session:
user = await UserService.get_user_by_telegram_id(session, callback.from_user.id)
# Проверяем, нет ли уже такого слова
existing = await VocabularyService.get_word_by_original(
session, user_id, word_data['word']
)
if existing:
await callback.answer(f"Слово '{word_data['word']}' уже в словаре", show_alert=True)
return
# Добавляем слово
learn = user.learning_language if user else 'en'
ui = user.language_interface if user else 'ru'
ctx = word_data.get('context')
examples = ([{learn: ctx, ui: ''}] if ctx else [])
await VocabularyService.add_word(
session=session,
user_id=user_id,
word_original=word_data['word'],
word_translation=word_data['translation'],
source_lang=user.learning_language if user else None,
translation_lang=user.language_interface if user else None,
transcription=word_data.get('transcription'),
examples=examples,
source=WordSource.CONTEXT,
category='imported',
difficulty_level=data.get('level')
)
lang = (user.language_interface if user else 'ru') or 'ru'
await callback.message.answer(t(lang, 'import.added_single', word=word_data['word']))
@router.callback_query(F.data == "import_all_words", ImportStates.viewing_words)
async def import_all_words(callback: CallbackQuery, state: FSMContext):
"""Добавить все слова из импорта"""
# Сразу отвечаем, так как операция может занять заметное время
await callback.answer()
data = await state.get_data()
words = data.get('words', [])
user_id = data.get('user_id')
added_count = 0
skipped_count = 0
async with async_session_maker() as session:
user = await UserService.get_user_by_telegram_id(session, callback.from_user.id)
for word_data in words:
# Проверяем, нет ли уже такого слова
existing = await VocabularyService.get_word_by_original(
session, user_id, word_data['word']
)
if existing:
skipped_count += 1
continue
# Добавляем слово
learn = user.learning_language if user else 'en'
ui = user.language_interface if user else 'ru'
ctx = word_data.get('context')
examples = ([{learn: ctx, ui: ''}] if ctx else [])
await VocabularyService.add_word(
session=session,
user_id=user_id,
word_original=word_data['word'],
word_translation=word_data['translation'],
source_lang=user.learning_language if user else None,
translation_lang=user.language_interface if user else None,
transcription=word_data.get('transcription'),
examples=examples,
source=WordSource.CONTEXT,
category='imported',
difficulty_level=data.get('level')
)
added_count += 1
lang = (user.language_interface if user else 'ru') or 'ru'
result_text = t(lang, 'import.added_count', n=added_count)
if skipped_count > 0:
result_text += "\n" + t(lang, 'import.skipped_count', n=skipped_count)
await callback.message.edit_reply_markup(reply_markup=None)
await callback.message.answer(result_text)
await state.clear()
@router.callback_query(F.data == "close_import", ImportStates.viewing_words)
async def close_import(callback: CallbackQuery, state: FSMContext):
"""Закрыть импорт"""
await callback.message.delete()
await state.clear()
await callback.answer()
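
For context on how these handlers come into play: the router defined at the top of the file only takes effect once it is registered on the bot's dispatcher. Below is a minimal wiring sketch under aiogram 3, assuming a conventional entry point; the import path and setup are guesses based on the file path above, not code from this repository.

# Hypothetical wiring sketch -- the project's real entry point is not shown here.
import asyncio

from aiogram import Bot, Dispatcher

from bot.handlers import import_text  # assumed import path, based on the file location

async def main() -> None:
    # The word list uses <b>/<i> tags, so the real bot presumably defaults to HTML parse mode
    bot = Bot(token="...")
    dp = Dispatcher()  # in-memory FSM storage by default, enough for ImportStates
    dp.include_router(import_text.router)  # enables /import, /cancel and the import callbacks
    await dp.start_polling(bot)

if __name__ == "__main__":
    asyncio.run(main())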