How do I remove duplicate links from the parsing result?
import requests
from bs4 import BeautifulSoup as bs

def bf_parse(base_url, headers):
    session = requests.session()
    request = session.get(base_url, headers=headers)
    if request.status_code == 200:
        soup = bs(request.content, 'html.parser')
        # collect profile links ('Просмотр профиля' is the "View profile" link title on the site)
        links = [link.get('href') for link in soup.find_all('a', title='Просмотр профиля')]
        f = open("user_baza.txt", "a")
        f.write('\n'.join(links) + '\n')  # trailing newline so repeated appends don't glue links together
        f.close()
        print('\n'.join(links))
    else:
        print("No more user links on this page, replace the parse URL")

bf_parse(base_url, headers)  # base_url and headers are defined elsewhere in the poster's script
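
One way to drop the duplicates, sketched below under the assumption that link order should be preserved and that user_baza.txt may already hold links from earlier runs: dedupe the current page with dict.fromkeys and skip anything already saved in the file. The name bf_parse_unique and the reuse of user_baza.txt as the "already seen" set are illustrative choices, not part of the original code.

import os
import requests
from bs4 import BeautifulSoup as bs

def bf_parse_unique(base_url, headers):
    # links saved by earlier runs; anything already in the file is skipped
    seen = set()
    if os.path.exists("user_baza.txt"):
        with open("user_baza.txt", encoding="utf-8") as f:
            seen = {line.strip() for line in f if line.strip()}

    session = requests.Session()
    request = session.get(base_url, headers=headers)
    if request.status_code != 200:
        print("No more user links on this page, replace the parse URL")
        return

    soup = bs(request.content, 'html.parser')
    links = [link.get('href') for link in soup.find_all('a', title='Просмотр профиля')]

    # dict.fromkeys drops duplicates within this page while keeping their order
    new_links = [href for href in dict.fromkeys(links) if href and href not in seen]

    if new_links:
        with open("user_baza.txt", "a", encoding="utf-8") as f:
            f.write('\n'.join(new_links) + '\n')
        print('\n'.join(new_links))

If order does not matter, a plain set() over all collected links works just as well; the file check only matters because the script appends to the same file across runs.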