fix: remove collect_links function
This commit is contained in:
parent
d56dca8cd8
commit
8a206a34eb
72
bot.py
72
bot.py
@ -16,7 +16,6 @@ from aiogram.types import InlineKeyboardMarkup, InlineKeyboardButton, FSInputFil
|
||||
|
||||
# Импорт только нужного
|
||||
from config import CONFIG
|
||||
from link_collector import collect_links
|
||||
from main import process_batch, save_to_excel, load_urls
|
||||
|
||||
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
|
||||
@ -330,76 +329,6 @@ async def edit_search_callback(query: CallbackQuery):
|
||||
|
||||
# ==================== ОСНОВНЫЕ ДЕЙСТВИЯ ====================
|
||||
|
||||
async def scrape_callback(query: CallbackQuery):
    """Handle the "scrape" inline-button: run the full scraping pipeline.

    Collects links, processes them in a batch with live progress updates,
    de-duplicates the results, then delivers them either as an Excel file
    or as formatted text (sent as a document when too long for one message).

    Args:
        query: aiogram callback query from the inline keyboard.

    Side effects:
        Edits/sends Telegram messages; may write an Excel or temp .txt file;
        updates the module-level ``last_output_file`` on Excel output.
    """
    await query.answer("🚀 Запуск скрейпинга...")
    msg = await query.message.edit_text("⏳ <b>Выполняю полный скрейпинг...</b>", parse_mode="HTML")

    # Shared mutable state for the progress callback: last-update timestamp
    # (list so the nested coroutine can rebind it) and the live phone set.
    last = [time.time()]
    unique_phones = set()

    async def progress_cb(done: int, total: int):
        # Throttled progress rendering is delegated to simple_progress.
        await simple_progress(msg, done, total, last, unique_phones)

    try:
        links = collect_links()
        urls = load_urls(links)
        if not urls:
            await msg.answer("❌ Нет ссылок для обработки", reply_markup=main_menu_keyboard())
            return

        # Pass unique_phones so the batch processor fills it as it goes.
        raw = await process_batch(urls, progress_callback=progress_cb, unique_phones=unique_phones)

        # Keep only rows with a phone (r[1]) and drop duplicates by r[2].
        # Explicit loop instead of the old side-effecting comprehension
        # (`... and not seen.add(r[2])`), which relied on set.add() -> None.
        seen = set()
        unique = []
        for r in raw:
            if r[1] and r[2] not in seen:
                seen.add(r[2])
                unique.append(r)
        # Widen each 4-tuple to the 5-field output row expected downstream.
        results = [(o, p, d, promo, "—") for o, p, d, promo in unique]

        # Final progress update (unique_phones is fully populated by now).
        await progress_cb(len(urls), len(urls))

        if results:
            global last_output_file
            fmt = CONFIG.get("output_format", "excel")

            if fmt == "excel":
                ts = datetime.now().strftime("%Y%m%d_%H%M%S")
                path = CONFIG["output_file"].format(timestamp=ts)
                save_to_excel(results, path)
                last_output_file = path

                promo_cnt = sum(1 for r in results if r[3])
                await query.message.answer(
                    f"✅ <b>Скрейпинг завершён!</b>\n"
                    f"📊 Обработано URL: {len(urls)}\n"
                    f"📞 Найдено телефонов: {len(results)}\n"
                    f"🎯 Из них promo: {promo_cnt}",
                    parse_mode="HTML"
                )
                await query.message.answer_document(FSInputFile(path), caption="📁 Результаты")
            else:
                # Text output (phones / domains / both as plain text).
                text = format_results(results, fmt)
                if text:
                    if len(text) > 3800:
                        # Too long for a single Telegram message — ship as a file.
                        with tempfile.NamedTemporaryFile(mode='w+', encoding='utf-8', suffix='.txt', delete=False) as f:
                            f.write(text)
                            path = f.name
                        caption = {"phones": "📞 Номера", "domains": "🌐 Домены", "both": "📞+🌐 Результаты"}.get(fmt, "Результаты")
                        await query.message.answer_document(FSInputFile(path), caption=f"✅ {caption}")
                        Path(path).unlink()
                    else:
                        labels = {"phones": "📞", "domains": "🌐", "both": "📞+🌐"}
                        await query.message.answer(
                            f"✅ <b>{labels.get(fmt, '')} Результаты:</b>\n\n{text}",
                            parse_mode="HTML"
                        )
                else:
                    await query.message.answer("⚠️ Нет данных для отображения")
        else:
            await query.message.answer("⚠️ Телефоны не найдены")
    except Exception as e:
        # Boundary handler: surface any pipeline error to the user.
        await query.message.answer(f"❌ Ошибка: {e}")

    await query.message.answer("Что дальше?", reply_markup=main_menu_keyboard())
|
||||
|
||||
async def process_callback(query: CallbackQuery):
|
||||
await query.answer()
|
||||
@ -516,7 +445,6 @@ def main_bot(token: str):
|
||||
|
||||
dp.callback_query.register(menu_callback, F.data == "main_menu")
|
||||
dp.callback_query.register(status_handler, F.data == "status")
|
||||
dp.callback_query.register(scrape_callback, F.data == "scrape")
|
||||
dp.callback_query.register(process_callback, F.data == "process")
|
||||
dp.callback_query.register(upload_links_callback, F.data == "upload_links")
|
||||
dp.callback_query.register(search_menu_callback, F.data == "search_menu")
|
||||
|
||||
Loading…
Reference in New Issue
Block a user