GET Request:
python2.7:
# Python 2.7: send a GET request by appending URL-encoded parameters
# to the query string.
import urllib
import urllib2

url = 'http://192.168.199.1:8000/mainsugar/loginGET/'
textmod = {'user': 'admin', 'password': 'admin'}
textmod = urllib.urlencode(textmod)
print(textmod)  # output: password=admin&user=admin
req = urllib2.Request(url='%s%s%s' % (url, '?', textmod))
res = urllib2.urlopen(req)
res = res.read()
print(res)  # output: login successful
python3.5:
# Python 3.5: send a GET request with a custom User-Agent header.
from urllib import parse, request

textmod = {'user': 'admin', 'password': 'admin'}
textmod = parse.urlencode(textmod)
print(textmod)  # output: user=admin&password=admin
header_dict = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; '
                             'rv:11.0) like Gecko'}
url = 'http://192.168.199.1:8000/mainsugar/loginGET/'
req = request.Request(url='%s%s%s' % (url, '?', textmod), headers=header_dict)
res = request.urlopen(req)
res = res.read()
# Python 3 returns raw bytes; decode for readable output.
print(res)  # e.g. b'\xe7\x99\xbb\xe5\xbd\x95\xe6\x88\x90\xe5\x8a\x9f'
print(res.decode(encoding='utf-8'))  # output: login successful
POST request:
python2.7:
# Python 2.7: POST a JSON-RPC payload (Zabbix user.login).
import json
import urllib2

textmod = {"jsonrpc": "2.0", "method": "user.login",
           "params": {"user": "admin", "password": "zabbix"},
           "auth": None, "id": 1}
textmod = json.dumps(textmod)
print(textmod)
header_dict = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; '
                             'rv:11.0) like Gecko',
               'Content-Type': 'application/json'}
url = 'http://192.168.199.10/api_jsonrpc.php'
req = urllib2.Request(url=url, data=textmod, headers=header_dict)
res = urllib2.urlopen(req)
res = res.read()
print(res)  # e.g. {"jsonrpc":"2.0","result":"...","id":1}
python3.5:
# Python 3.5: POST a JSON-RPC payload (Zabbix user.login).
# NOTE: the original ran both encodings in sequence, which would fail
# (urlencode cannot take the bytes produced by json.dumps); only one
# of the two applies per request.
from urllib import parse, request
import json

textmod = {"jsonrpc": "2.0", "method": "user.login",
           "params": {"user": "admin", "password": "zabbix"},
           "auth": None, "id": 1}
# For a JSON body: serialize, then encode to bytes (urlopen requires bytes).
textmod = json.dumps(textmod).encode(encoding='utf-8')
# For ordinary form data use instead:
# textmod = parse.urlencode(textmod).encode(encoding='utf-8')
print(textmod)
header_dict = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; '
                             'rv:11.0) like Gecko',
               'Content-Type': 'application/json'}
url = 'http://192.168.199.10/api_jsonrpc.php'
req = request.Request(url=url, data=textmod, headers=header_dict)
res = request.urlopen(req)
res = res.read()
print(res)  # raw bytes
print(res.decode(encoding='utf-8'))  # readable JSON text
Use of Cookies (python3.5):
# Python 3.5: install a cookie-aware opener so subsequent requests
# automatically store and send cookies.
from urllib import request, parse
from http import cookiejar

# Create the cookie processor. (Bug fix: the module was imported as
# `cookiejar`, so the class is `cookiejar.CookieJar`, not
# `http.cookiejar.CookieJar` — the bare name `http` was never bound.)
cj = cookiejar.CookieJar()
opener = request.build_opener(request.HTTPCookieProcessor(cj),
                              request.HTTPHandler)
request.install_opener(opener)
# ... make normal requests below; they will now carry cookies ...
Ten methods of fetching web resources with Python 3
1, the simplest
# 1. Simplest: fetch a URL directly with urlopen.
import urllib.request

response = urllib.request.urlopen('http://python.org/')
html = response.read()
2. Use Request
# 2. Build a Request object first, then open it.
import urllib.request

req = urllib.request.Request('http://python.org/')
response = urllib.request.urlopen(req)
the_page = response.read()
3. Send data
#!/usr/bin/env python3
# 3. Send POST data with an extra Referer header.
import urllib.parse
import urllib.request

url = 'http://localhost/login.php'
values = {
    'act': 'login',
    'login[email]': '[email protected]',
    'login[password]': '123456',
}
# Bug fix: in Python 3, urlencode() returns str but urlopen() requires
# the data argument to be bytes — encode it.
data = urllib.parse.urlencode(values).encode('utf-8')
req = urllib.request.Request(url, data)
req.add_header('Referer', 'http://www.python.org/')
response = urllib.request.urlopen(req)
the_page = response.read()
print(the_page.decode('utf8'))
4. Send data and headers
#!/usr/bin/env python3
# 4. Send POST data together with request headers.
import urllib.parse
import urllib.request

url = 'http://localhost/login.php'
user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
values = {
    'act': 'login',
    'login[email]': '[email protected]',
    'login[password]': '123456',
}
headers = {'User-Agent': user_agent}
# Bug fix: urlopen() requires bytes for the data argument in Python 3.
data = urllib.parse.urlencode(values).encode('utf-8')
req = urllib.request.Request(url, data, headers)
response = urllib.request.urlopen(req)
the_page = response.read()
print(the_page.decode('utf8'))
5. HTTP Error
#!/usr/bin/env python3
# 5. Catch an HTTP error and show its status code and body.
# Bug fix: urllib.error must be imported explicitly; importing
# urllib.request alone does not guarantee the name is bound.
import urllib.error
import urllib.request

req = urllib.request.Request('http://www.111cn.net')
try:
    urllib.request.urlopen(req)
except urllib.error.HTTPError as e:
    print(e.code)
    print(e.read().decode('utf8'))
6. Exception Handling 1
#! /usr/bin/env Python3
From urllib.request import request, Urlopen
From Urllib.error import Urlerror, Httperror
req = Request ("http://www.111cn.net/")
Try
Response = Urlopen (req)
Except Httperror as E:
Print (' The server couldn ' t fulfill the request. ')
Print (' Error code: ', E.code)
Except Urlerror as E:
Print (' We failed to reach a server. ')
Print (' Reason: ', E.reason)
Else
Print ("good!")
Print (Response.read (). Decode ("UTF8"))
7. Exception Handling 2
#!/usr/bin/env python3
# 7. Exception handling, pattern 2: catch only URLError and inspect
# which attributes are present on the exception.
from urllib.request import Request, urlopen
from urllib.error import URLError

req = Request('http://www.111cn.net/')
try:
    response = urlopen(req)
except URLError as e:
    # NOTE(review): an HTTPError carries both .reason and .code, so the
    # first branch also absorbs HTTP errors — kept as in the original.
    if hasattr(e, 'reason'):
        print('We failed to reach a server.')
        print('Reason: ', e.reason)
    elif hasattr(e, 'code'):
        print("The server couldn't fulfill the request.")
        print('Error code: ', e.code)
else:
    print('good!')
    print(response.read().decode('utf8'))
8. HTTP Authentication
#!/usr/bin/env python3
# 8. HTTP basic authentication via a password manager.
import urllib.request

# Create a password manager.
password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
# Add the username and password.
# If we knew the realm, we could use it instead of None.
top_level_url = 'https://www.111cn.net/'
password_mgr.add_password(None, top_level_url, 'rekfan', 'xxxxxx')
handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
# Create an "opener" (OpenerDirector instance).
opener = urllib.request.build_opener(handler)
# Use the opener directly to fetch a URL.
a_url = 'https://www.111cn.net/'
x = opener.open(a_url)
print(x.read())
# Install the opener so all urllib.request.urlopen calls use it.
urllib.request.install_opener(opener)
a = urllib.request.urlopen(a_url).read().decode('utf8')
print(a)
9. Using a proxy
#!/usr/bin/env python3
# 9. Route requests through a proxy.
import urllib.request

# NOTE(review): 'sock5' is not a scheme urllib understands —
# ProxyHandler keys are URL schemes like 'http'/'https', and SOCKS
# proxies require third-party support. Kept verbatim from the original;
# confirm the intended proxy type before relying on this.
proxy_support = urllib.request.ProxyHandler({'sock5': 'localhost:1080'})
opener = urllib.request.build_opener(proxy_support)
urllib.request.install_opener(opener)
a = urllib.request.urlopen('http://www.111cn.net').read().decode('utf8')
print(a)
10. Timeout
#!/usr/bin/env python3
# 10. Apply a global socket timeout to all requests.
import socket
import urllib.request

# Timeout in seconds.
timeout = 2
socket.setdefaulttimeout(timeout)
# urllib.request.urlopen now uses the default timeout we set in the
# socket module.
req = urllib.request.Request('http://www.111cn.net/')
a = urllib.request.urlopen(req).read()
print(a)
Python: sending GET and POST requests