Python crawler for Reader magazine that generates PDF files

Source: Internet
Author: User
This article is a python tool that crawls Reader magazines and uses reportlab to generate pdf files after learning beautifulsoup. I will also share it with you, for more information, see. After learning beautifulsoup, I made a web crawler, crawled Reader magazines, and produced them as pdf using reportlab ..

crawler.py

The code is as follows:


#!/usr/bin/env python
# coding=utf-8
"""
Reader-magazine crawler: fetches an issue's table of contents and articles.

Author: Anemone
Filename: getmain.py
Last modified:
E-mail: anemone@82flex.com
"""
import urllib2  # Python 2 only; under Python 3 this would be urllib.request
from bs4 import BeautifulSoup
import re
import sys

# Python 2 idiom: force the default codec to UTF-8 so mixed str/unicode
# concatenation of Chinese text does not raise UnicodeDecodeError.
# (Not available and not needed on Python 3.)
reload(sys)
sys.setdefaultencoding('utf-8')
def getEachArticle(url):
    """Download one article page and extract its metadata and body text.

    Parameters:
        url: absolute URL of a single article page on www.52duzhe.com.

    Returns:
        dict with keys "title", "writer", "from" (source magazine) and
        "context" (the article body text).

    Raises:
        urllib2.URLError on network failure; AttributeError if the page
        does not contain the expected h1/pub_date/media_name elements.
    """
    response = urllib2.urlopen(url)
    html = response.read()
    soup = BeautifulSoup(html)
    title = soup.find("h1").string
    writer = soup.find(id="pub_date").string.strip()
    _from = soup.find(id="media_name").string.strip()
    # Full page text; the article body sits after an inline
    # "BAIDU_CLB...;" share-widget script, so split on it and keep part [1].
    text = soup.get_text()
    main = re.split(r"BAIDU_CLB.*;", text)
    return {"title": title, "writer": writer, "from": _from,
            "context": main[1]}
def getCatalog(issue):
    """Crawl the table of contents of one issue and every article in it.

    Parameters:
        issue: issue id string such as "201424" (year + issue number).

    Returns:
        dict mapping column title -> list of article dicts as produced by
        getEachArticle().
    """
    url = "http://www.52duzhe.com/" + issue[:4] + "_" + issue[-2:] + "/"
    firstUrl = url + "duzh" + issue + "01.html"
    # The per-issue landing page is actually index.html; the line above is
    # kept only for reference (it is immediately overwritten, as in the
    # original script).
    firstUrl = url + "index.html"
    duzhe = dict()
    response = urllib2.urlopen(firstUrl)
    html = response.read()
    soup = BeautifulSoup(html)
    # The first link inside the index table points at the real TOC page.
    firstUrl = url + soup.table.a.get("href")
    response = urllib2.urlopen(firstUrl)
    html = response.read()
    soup = BeautifulSoup(html)
    columns = soup.find_all("h2")  # one h2 per magazine column
    for column in columns:
        print(column.string)
        duzhe[column.string] = list()
        for link in column.parent.find_all("a"):
            href = url + link.get("href")
            print(href)
            # Retry each article until it downloads; only network/parse
            # errors are expected here. NOTE(review): an unreachable page
            # would loop forever — consider a bounded retry count.
            while 1:
                try:
                    article = getEachArticle(href)
                    break
                except Exception:
                    continue
            duzhe[column.string].append(article)
    return duzhe
def readDuZhe(duzhe):
    """Print the title of every article in the crawled issue.

    Parameters:
        duzhe: dict mapping column title -> list of article dicts, each
               containing at least a "title" key (see getCatalog()).
    """
    for eachColumn in duzhe:
        for eachArticle in duzhe[eachColumn]:
            print(eachArticle["title"])
if __name__ == '__main__':
    # issue = raw_input("issue (201501):")  # interactive variant
    # Hard-coded to issue 2014-24 for a quick smoke run.
    readDuZhe(getCatalog("201424"))

getpdf.py

The code is as follows:


#!/usr/bin/env python
# coding=utf-8
"""
PDF writer: turns a crawled Reader issue (see crawler.py) into a PDF
using reportlab's platypus layer.

Author: Anemone
Filename: writetopdf.py
Last modified:
E-mail: anemone@82flex.com
"""
import copy

import reportlab.rl_config
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.lib import fonts
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import Paragraph, SimpleDocTemplate, flowables

import crawler
def writePDF(issue, duzhe):
    """Render a crawled issue as "duzhe<issue>.pdf".

    Layout: a title page and table of contents first, then one section per
    article (title, writer/source line, body paragraphs), each article
    ending with a page break.

    Parameters:
        issue: issue id string such as "201424"; used in the output filename.
        duzhe: dict mapping column title -> list of article dicts with
               "title", "writer", "from" and "context" keys.
    """
    reportlab.rl_config.warnOnMissingFontGlyphs = 0
    # Register CJK-capable TrueType fonts; simsun.ttc / msyh.ttc must be
    # present in the working directory (or a reportlab font path).
    pdfmetrics.registerFont(TTFont('song', "simsun.ttc"))
    pdfmetrics.registerFont(TTFont('hei', "msyh.ttc"))
    # Map the 'song' family: regular/italic -> song, bold variants -> hei.
    fonts.addMapping('song', 0, 0, 'song')
    fonts.addMapping('song', 0, 1, 'song')
    fonts.addMapping('song', 1, 0, 'hei')
    fonts.addMapping('song', 1, 1, 'hei')

    stylesheet = getSampleStyleSheet()
    normalStyle = copy.deepcopy(stylesheet['Normal'])
    normalStyle.fontName = 'song'
    normalStyle.fontSize = 11
    normalStyle.leading = 11
    normalStyle.firstLineIndent = 20
    titleStyle = copy.deepcopy(stylesheet['Normal'])
    titleStyle.fontName = 'song'
    titleStyle.fontSize = 15
    titleStyle.leading = 20
    firstTitleStyle = copy.deepcopy(stylesheet['Normal'])
    firstTitleStyle.fontName = 'song'
    firstTitleStyle.fontSize = 20
    firstTitleStyle.leading = 20
    firstTitleStyle.firstLineIndent = 50
    smallStyle = copy.deepcopy(stylesheet['Normal'])
    smallStyle.fontName = 'song'
    smallStyle.fontSize = 8
    smallStyle.leading = 8

    story = []
    # Cover line + table of contents.
    story.append(Paragraph("Reader {0} period".format(issue), firstTitleStyle))
    for eachColumn in duzhe:
        story.append(Paragraph('_' * 28, titleStyle))
        story.append(Paragraph('{0}'.format(eachColumn), titleStyle))
        for eachArticle in duzhe[eachColumn]:
            story.append(Paragraph(eachArticle["title"], normalStyle))
    story.append(flowables.PageBreak())

    # Article bodies.
    for eachColumn in duzhe:
        for eachArticle in duzhe[eachColumn]:
            story.append(Paragraph("{0}".format(eachArticle["title"]),
                                   titleStyle))
            story.append(Paragraph("{0} {1}".format(eachArticle["writer"],
                                                    eachArticle["from"]),
                                   smallStyle))
            # NOTE(review): the scraped source lost the paragraph separator;
            # newline is the most plausible one — confirm against the
            # crawler's "context" field.
            for eachPara in eachArticle["context"].split("\n"):
                story.append(Paragraph(eachPara, normalStyle))
            story.append(flowables.PageBreak())

    doc = SimpleDocTemplate("duzhe" + issue + ".pdf")
    print("Writing PDF...")
    doc.build(story)
def main(issue):
    """Crawl the given issue and write it out as a PDF.

    Parameters:
        issue: issue id string such as "201501".
    """
    duzhe = crawler.getCatalog(issue)
    writePDF(issue, duzhe)
if __name__ == '__main__':
    # raw_input is Python 2; on Python 3 this would be input().
    issue = raw_input("Enter issue (201501):")
    main(issue)

The above is all the content of this article. I hope you will like it.

Contact Us

The content on this page is sourced from the Internet and does not represent Alibaba Cloud's opinion; the products and services mentioned on this page have no relationship with Alibaba Cloud. If the content of the page is confusing, please write us an email and we will handle the problem within 5 days of receiving it.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.