Reply to #102 c_c_lai
Here is another approach for your reference (parse the page with BeautifulSoup, process the data, then save it as CSV):

import requests
from bs4 import BeautifulSoup
import csv

headers = {"User-Agent":"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36"}

url = 'http://www.twse.com.tw/ch/trading/exchange/MI_MARGN/MI_MARGN.php'

# Query date in ROC (Minguo) calendar format: 105/09/08 = 2016-09-08
myDate = '105/09/08'

payload = {'download': '',
           'qdate': myDate,
           'selectType': 'ALL'}

res = requests.post(url, headers=headers, data=payload)
soup = BeautifulSoup(res.text, 'lxml')

trs = soup.select('table tr')

myList = []
subList = []

# Header rows matching the TWSE margin-trading table layout:
# stock code, stock name, margin-purchase columns, short-sale columns, offsetting, note
header1 = ['股票代號', '股票名稱', '融資(單位: 交易單位)', '', '', '', '', '',
           '融券(單位: 交易單位)', '', '', '', '', '', '資券互抵', '註記']

# Sub-headers: buy, sell, cash redemption, previous-day balance, today's balance, quota
header2 = ['', '', '買進', '賣出', '現金償還', '前日餘額', '今日餘額', '額',
           '買進', '賣出', '現金償還', '前日餘額', '今日餘額', '額', '', '']

for i, tr in enumerate(trs):
    if i == 6:
        subList = header1
    elif i == 7:
        subList = header2
    else:
        for td in tr.find_all('td'):
            subList.append(td.text)

    myList.append(subList)
    subList = []
    if i == 5:          # add a blank spacer row after the summary rows
        myList.append(subList)

# newline='' prevents extra blank rows on Windows; the BOM lets Excel detect UTF-8
with open('output.csv', 'w', newline='', encoding='utf-8') as f:
    f.write('\ufeff')
    w = csv.writer(f)
    for sub in myList:
        w.writerow(sub)
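
The qdate field uses the ROC (Minguo) calendar, where the year is the Gregorian year minus 1911. If you want to query other dates, a small helper like the hypothetical to_roc_date below (not part of the original code) can build the string from a standard date:

from datetime import date

def to_roc_date(d):
    # ROC year = Gregorian year - 1911; format matches the hard-coded '105/09/08'
    return '{}/{:02d}/{:02d}'.format(d.year - 1911, d.month, d.day)

myDate = to_roc_date(date(2016, 9, 8))   # -> '105/09/08'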
 
If you don't care about the output looking polished, you can use pandas directly:

import requests
import pandas as pd

headers = {"User-Agent":"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.112 Safari/537.36"}

url = 'http://www.twse.com.tw/ch/trading/exchange/MI_MARGN/MI_MARGN.php'

# Query date in ROC calendar format: 105/09/07 = 2016-09-07
myDate = '105/09/07'

payload = {'download': '',
           'qdate': myDate,
           'selectType': 'ALL'}

res = requests.post(url, headers=headers, data=payload)

# read_html parses every <table> in the response into a list of DataFrames
dfs = pd.read_html(res.text)

# In IPython, dfs[1] retrieves the main data
dfs[1]
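
To check which parsed table actually holds the margin data and then save it, something along these lines should work (a sketch; the output file name is an arbitrary example, and the assumption that dfs[1] is the main table comes from the post above):

# Inspect the parsed tables to confirm which one holds the margin data
for i, df in enumerate(dfs):
    print(i, df.shape)

# Write the main table to CSV; 'utf-8-sig' adds a BOM so Excel detects UTF-8
dfs[1].to_csv('output_pandas.csv', index=False, encoding='utf-8-sig')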
 