How to get all link URLs from a website with Python BeautifulSoup - code examples

Example 1: how to get all links from a website with Python BeautifulSoup

from bs4 import BeautifulSoup
import requests

response = requests.get('url')  # replace 'url' with the page URL
soup = BeautifulSoup(response.text, 'html.parser')  # parse the downloaded HTML
all_links = soup.find_all('a')  # returns all <a> tags (links + text)
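
Each item in all_links is a bs4 Tag object, so the link text and the href attribute still have to be pulled out of each tag; Example 2 below shows how.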

Example 2: how to get all link text from a website with Python BeautifulSoup

from bs4 import BeautifulSoup
import requests

response = requests.get('url')  # replace 'url' with the page URL
soup = BeautifulSoup(response.text, 'html.parser')  # parse the downloaded HTML
all_links = soup.find_all('a')  # returns all <a> tags (links + text)
for link in all_links:
    print(link.get_text())   # prints the link text
    print(link.get('href'))  # prints the link URL

Example 3: get all href links from a website with Python BeautifulSoup

from bs4 import BeautifulSoup
from urllib.request import urlopen
import re

def getLinks(url):
    html_page = urlopen(url)                        # download the page
    soup = BeautifulSoup(html_page, 'html.parser')  # parse the HTML
    links = []
    # keep only absolute links, i.e. hrefs starting with http:// or https://
    for link in soup.find_all('a', attrs={'href': re.compile("^https?://")}):
        links.append(link.get('href'))
    return links

print(getLinks("https://arstechnica.com"))
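
The regex above drops relative links such as /some/page. If you also want those as full URLs, one option is to resolve each href against the page URL with urllib.parse.urljoin. The sketch below reuses the requests/BeautifulSoup setup from the earlier examples; the helper name get_absolute_links is made up for illustration.

from bs4 import BeautifulSoup
from urllib.parse import urljoin
import requests

def get_absolute_links(url):
    # hypothetical helper: fetch the page, then resolve every href against the page URL
    soup = BeautifulSoup(requests.get(url).text, 'html.parser')
    links = []
    for link in soup.find_all('a', href=True):    # only <a> tags that actually have an href
        links.append(urljoin(url, link['href']))  # relative hrefs become absolute URLs
    return links

print(get_absolute_links("https://arstechnica.com"))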