BeautifulSoup Basic Crawler Script

This Python script prompts for a website, crawls that page for all links (internal and external), and then writes every URL it finds to a .csv file.

#!/usr/bin/env python3

from bs4 import BeautifulSoup

import requests
import csv

url = input("Enter a website to extract the URLs from: ")

# Fetch the page (assumes the site is reachable over plain HTTP).
r = requests.get("http://" + url)

data = r.text

# Parse the HTML with the standard library's built-in parser.
soup = BeautifulSoup(data, "html.parser")

# Write every href found in an <a> tag to eggs.csv.
with open('eggs.csv', 'w', newline='') as csvfile:
    spamwriter = csv.writer(csvfile, quotechar='|', quoting=csv.QUOTE_MINIMAL)
    for link in soup.find_all('a'):
        href = link.get('href')
        if href:  # skip <a> tags that have no href attribute
            print(href)
            spamwriter.writerow([href])
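
Note that many of the collected hrefs will be relative paths (e.g. /about) rather than full URLs. A minimal sketch of how they could be resolved to absolute URLs before writing, using urllib.parse.urljoin from the standard library (example.com and links.csv here are placeholders, not part of the original script):

#!/usr/bin/env python3

from urllib.parse import urljoin

import csv
import requests
from bs4 import BeautifulSoup

base = "http://example.com"  # placeholder; substitute the site entered above
soup = BeautifulSoup(requests.get(base).text, "html.parser")

with open('links.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile)
    for link in soup.find_all('a'):
        href = link.get('href')
        if href:
            # urljoin resolves relative paths against the base and leaves
            # absolute URLs untouched, e.g. "/about" -> "http://example.com/about"
            writer.writerow([urljoin(base, href)])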