|
| 1 | +from selenium import webdriver |
| 2 | +from selenium.webdriver.support.ui import WebDriverWait |
| 3 | +from selenium.webdriver.common.by import By |
| 4 | +from selenium.webdriver.common.keys import Keys |
| 5 | +from bs4 import BeautifulSoup |
| 6 | +import time |
| 7 | +from selenium.webdriver.support import expected_conditions as EC |
| 8 | + |
# Start a local Chrome session controlled by Selenium.
driver = webdriver.Chrome()

# Open Telegram Web; the user must authenticate by scanning the QR code.
driver.get("https://web.telegram.org/k/")
# The prompt tells the user to press Enter once the QR code is scanned,
# so block on stdin instead of a fixed sleep: a hard-coded 10 s both
# races slow logins and wastes time on fast ones.
input("Scan QR Code, And then Enter")
print("Logged In")
| 17 | + |
# Wait (up to 10 s) for the chat-list search box to become clickable,
# then run a query for the group the user names.
search_box = WebDriverWait(driver, 10).until(
    EC.element_to_be_clickable(
        (By.XPATH, "//input[@class='input-field-input i18n input-search-input']")
    )
)

target_group = input("Enter the group name")
# Type the query, give the suggestion list a moment to populate,
# then submit with Enter.
search_box.send_keys(target_group)
time.sleep(2)
search_box.send_keys(Keys.RETURN)
| 30 | + |
# Open the first matching chat from the search results.
# NOTE: the original called driver.find_element() immediately after
# submitting the search and only slept *afterwards*, so the lookup could
# run before the result list rendered (NoSuchElementException race).
# Waiting for clickability removes both the race and the fixed sleeps.
chat_xpath = "//a[@class='row no-wrap row-with-padding row-clickable hover-effect rp chatlist-chat chatlist-chat-abitbigger']"
chat_element = WebDriverWait(driver, 10).until(
    EC.element_to_be_clickable((By.XPATH, chat_xpath))
)
chat_element.click()

# Open the group's profile panel from inside the chat.
person_profile_xpath = "//div[@class='person']"
person_profile_element = WebDriverWait(driver, 10).until(
    EC.element_to_be_clickable((By.XPATH, person_profile_xpath))
)
person_profile_element.click()
| 42 | + |
# Grab the rendered DOM and parse it with BeautifulSoup.
html = driver.page_source
soup = BeautifulSoup(html, "html.parser")

# Panel that holds the group's member list.
members_container = soup.find("div", {"class": "search-super-content-members"})
member_list = members_container.find("ul") if members_container else None

if member_list is None:
    # Guard: soup.find() returns None when the panel has not rendered;
    # the original would raise AttributeError here.
    print("Member list not found - the profile panel may not have loaded.")
else:
    # Iterate only the <ul>'s *tag* children. Iterating the tag itself
    # (as the original did) also yields whitespace NavigableString nodes,
    # which have no .find() and crash the loop.
    for member_item in member_list.find_all(True, recursive=False):
        member_name = member_item.find("span", {"class": "peer-title"})
        if member_name:
            print("Member Name:", member_name.text)

        member_img = member_item.find("img")
        # .get() avoids a KeyError when an <img> has no src attribute
        # (e.g. lazy-loaded avatars).
        if member_img and member_img.get("src"):
            print("Member Image:", member_img["src"])
| 59 | + |
| 60 | + |
0 commit comments