Writing a stock data fetching program in Python


I am trying to get stock data from Yahoo with the following code:

import pandas_datareader.data as web
import datetime
start = datetime.datetime(2010, 1, 1)
end = datetime.datetime(2015, 12, 31)
f = web.DataReader('SNE', 'yahoo', start, end)
print(f)
but it fails with the error below. Could someone please help me?

---------------------------------------------------------------------------
gaierror                                  Traceback (most recent call last)
~\Anaconda3\lib\site-packages\requests\packages\urllib3\connection.py in _new_conn(self)
    140             conn = connection.create_connection(
--> 141                 (self.host, self.port), self.timeout, **extra_kw)
    142 

~\Anaconda3\lib\site-packages\requests\packages\urllib3\util\connection.py in create_connection(address, timeout, source_address, socket_options)
     59 
---> 60     for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
     61         af, socktype, proto, canonname, sa = res

~\Anaconda3\lib\socket.py in getaddrinfo(host, port, family, type, proto, flags)
    742     addrlist = []
--> 743     for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
    744         af, socktype, proto, canonname, sa = res

gaierror: [Errno 11001] getaddrinfo failed

During handling of the above exception, another exception occurred:

NewConnectionError                        Traceback (most recent call last)
~\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
    599                                                   body=body, headers=headers,
--> 600                                                   chunked=chunked)
    601 

~\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    355         else:
--> 356             conn.request(method, url, **httplib_request_kw)
    357 

~\Anaconda3\lib\http\client.py in request(self, method, url, body, headers, encode_chunked)
   1238         """Send a complete request to the server."""
-> 1239         self._send_request(method, url, body, headers, encode_chunked)
   1240 

~\Anaconda3\lib\http\client.py in _send_request(self, method, url, body, headers, encode_chunked)
   1284             body = _encode(body, 'body')
-> 1285         self.endheaders(body, encode_chunked=encode_chunked)
   1286 

~\Anaconda3\lib\http\client.py in endheaders(self, message_body, encode_chunked)
   1233             raise CannotSendHeader()
-> 1234         self._send_output(message_body, encode_chunked=encode_chunked)
   1235 

~\Anaconda3\lib\http\client.py in _send_output(self, message_body, encode_chunked)
   1025         del self._buffer[:]
-> 1026         self.send(msg)
   1027 

~\Anaconda3\lib\http\client.py in send(self, data)
    963             if self.auto_open:
--> 964                 self.connect()
    965             else:

~\Anaconda3\lib\site-packages\requests\packages\urllib3\connection.py in connect(self)
    165     def connect(self):
--> 166         conn = self._new_conn()
    167         self._prepare_conn(conn)

~\Anaconda3\lib\site-packages\requests\packages\urllib3\connection.py in _new_conn(self)
    149             raise NewConnectionError(
--> 150                 self, "Failed to establish a new connection: %s" % e)
    151 

NewConnectionError: <requests.packages.urllib3.connection.HTTPConnection object at 0x0000001B3783DC18>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed

During handling of the above exception, another exception occurred:

MaxRetryError                             Traceback (most recent call last)
~\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    437                     retries=self.max_retries,
--> 438                     timeout=timeout
    439                 )

~\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
    648             retries = retries.increment(method, url, error=e, _pool=self,
--> 649                                         _stacktrace=sys.exc_info()[2])
    650             retries.sleep()

~\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
    387         if new_retry.is_exhausted():
--> 388             raise MaxRetryError(_pool, url, error or ResponseError(cause))
    389 

MaxRetryError: HTTPConnectionPool(host='ichart.finance.yahoo.com', port=80): Max retries exceeded with url: /table.csv?s=SNE&a=0&b=1&c=2010&d=11&e=31&f=2015&g=d&ignore=.csv (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x0000001B3783DC18>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed',))

During handling of the above exception, another exception occurred:

ConnectionError                           Traceback (most recent call last)
<ipython-input-1-7d307edbe1a3> in <module>()
      3 start = datetime.datetime(2010, 1, 1)
      4 end = datetime.datetime(2015, 12, 31)
----> 5 f = web.DataReader('SNE', 'yahoo', start, end)
      6 print(f)

~\Anaconda3\lib\site-packages\pandas_datareader\data.py in DataReader(name, data_source, start, end, retry_count, pause, session, access_key)
    115                                 adjust_price=False, chunksize=25,
    116                                 retry_count=retry_count, pause=pause,
--> 117                                 session=session).read()
    118 
    119     elif data_source == "yahoo-actions":

~\Anaconda3\lib\site-packages\pandas_datareader\yahoo\daily.py in read(self)
     75     def read(self):
     76         """ read one data from specified URL """
---> 77         df = super(YahooDailyReader, self).read()
     78         if self.ret_index:
     79             df['Ret_Index'] = _calc_return_index(df['Adj Close'])

~\Anaconda3\lib\site-packages\pandas_datareader\base.py in read(self)
    155         if isinstance(self.symbols, (compat.string_types, int)):
    156             df = self._read_one_data(self.url,
--> 157                                      params=self._get_params(self.symbols))
    158         # Or multiple symbols, (e.g., ['GOOG', 'AAPL', 'MSFT'])
    159         elif isinstance(self.symbols, DataFrame):

~\Anaconda3\lib\site-packages\pandas_datareader\base.py in _read_one_data(self, url, params)
     72         """ read one data from specified URL """
     73         if self._format == 'string':
---> 74             out = self._read_url_as_StringIO(url, params=params)
     75         elif self._format == 'json':
     76             out = self._get_response(url, params=params).json()

~\Anaconda3\lib\site-packages\pandas_datareader\base.py in _read_url_as_StringIO(self, url, params)
     83         Open url (and retry)
     84         """
---> 85         response = self._get_response(url, params=params)
     86         text = self._sanitize_response(response)
     87         out = StringIO()

~\Anaconda3\lib\site-packages\pandas_datareader\base.py in _get_response(self, url, params)
    112         # initial attempt + retry
    113         for i in range(self.retry_count + 1):
--> 114             response = self.session.get(url, params=params)
    115             if response.status_code == requests.codes.ok:
    116                 return response

~\Anaconda3\lib\site-packages\requests\sessions.py in get(self, url, **kwargs)
    529 
    530         kwargs.setdefault('allow_redirects', True)
--> 531         return self.request('GET', url, **kwargs)
    532 
    533     def options(self, url, **kwargs):

~\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
    516         }
    517         send_kwargs.update(settings)
--> 518         resp = self.send(prep, **send_kwargs)
    519 
    520         return resp

~\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs)
    637 
    638         # Send the request
--> 639         r = adapter.send(request, **kwargs)
    640 
    641         # Total elapsed time of the request (approximately)

~\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    500                 raise ProxyError(e, request=request)
    501 
--> 502             raise ConnectionError(e, request=request)
    503 
    504         except ClosedPoolError as e:

ConnectionError: HTTPConnectionPool(host='ichart.finance.yahoo.com', port=80): Max retries exceeded with url: /table.csv?s=SNE&a=0&b=1&c=2010&d=11&e=31&f=2015&g=d&ignore=.csv (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x0000001B3783DC18>: Failed to establish a new connection: [Errno 11001] getaddrinfo failed',))

The traceback shows the request going to ichart.finance.yahoo.com and the host name failing to resolve ([Errno 11001] getaddrinfo failed). Yahoo retired that endpoint, so this version of pandas_datareader's 'yahoo' source no longer works (the same error also appears if you are offline or behind a proxy). One workaround is the yahoo_finance package:

from yahoo_finance import Share

sne = Share('SNE')                                       # ticker from the question
data = sne.get_historical('2010-01-01', '2015-12-31')    # dates as 'YYYY-MM-DD' strings
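
If you want a DataFrame like the one DataReader would have returned, the rows from get_historical can be loaded into pandas. This is a minimal sketch, assuming yahoo_finance is installed and that get_historical returns one dict per trading day with 'Date', 'Open', 'High', 'Low', 'Close', 'Volume' and 'Adj_Close' string fields (as in yahoo_finance 1.x):

import pandas as pd
from yahoo_finance import Share

# Fetch daily history for the ticker and date range used in the question.
sne = Share('SNE')
rows = sne.get_historical('2010-01-01', '2015-12-31')   # assumed: list of dicts, one per trading day

# Build a DataFrame indexed by date, oldest row first, with numeric columns.
df = pd.DataFrame(rows)
df['Date'] = pd.to_datetime(df['Date'])
df = df.set_index('Date').sort_index()
for col in ['Open', 'High', 'Low', 'Close', 'Adj_Close', 'Volume']:
    df[col] = pd.to_numeric(df[col])                    # values come back as strings

print(df.head())

Like pandas_datareader, the yahoo_finance package relies on an unofficial Yahoo service, so it can also stop working without notice; treat the snippet above as a template for whatever data source you end up using.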