I am trying to fetch data from a URL using Python 3.5 with the following code:
# Fetch the NCBI E-utilities service description (einfo).
# FIX: the original snippet used plain http://. NCBI's servers require
# HTTPS and simply drop plain-HTTP connections, which is what produces
# "RemoteDisconnected: Remote end closed connection without response".
import requests

url = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'
# Always give network calls a timeout so a silent server cannot hang us.
r = requests.get(url, timeout=30)
print(r.content)
The url can be opened with no problems in the browser.
However, I am getting the following error (for this URL and for any other URL I try):
--------------------------------------------------------------------------
TypeError Traceback (most recent call last)
C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
375 try: # Python 2.7, use buffering of HTTP responses
--> 376 httplib_response = conn.getresponse(buffering=True)
377 except TypeError: # Python 2.6 and older
TypeError: getresponse() got an unexpected keyword argument 'buffering'
During handling of the above exception, another exception occurred:
RemoteDisconnected Traceback (most recent call last)
C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py
in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
558 timeout=timeout_obj,
--> 559 body=body, headers=headers)
560
C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py
in _make_request(self, conn, method, url, timeout,
**httplib_request_kw)
377 except TypeError: # Python 2.6 and older
--> 378 httplib_response = conn.getresponse()
379 except (SocketTimeout, BaseSSLError, SocketError) as e:
C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
try:
-> 1174 response.begin() 1175 except ConnectionError:
C:\Anaconda3\lib\http\client.py in begin(self)
281 while True:
--> 282 version, status, reason = self._read_status()
283 if status != CONTINUE:
>
> C:\Anaconda3\lib\http\client.py in _read_status(self)
> 250 # sending a valid response.
> --> 251 raise RemoteDisconnected("Remote end closed connection without"
> 252 " response")
>
> RemoteDisconnected: Remote end closed connection without response
>
> During handling of the above exception, another exception occurred:
>
> ProtocolError Traceback (most recent call
> last) C:\Anaconda3\lib\site-packages\requests\adapters.py in
> send(self, request, stream, timeout, verify, cert, proxies)
> 369 retries=self.max_retries,
> --> 370 timeout=timeout
> 371 )
>
> C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py
> in urlopen(self, method, url, body, headers, retries, redirect,
> assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
> 608 retries = retries.increment(method, url, error=e, _pool=self,
> --> 609 _stacktrace=sys.exc_info()[2])
> 610 retries.sleep()
>
> C:\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py
> in increment(self, method, url, response, error, _pool, _stacktrace)
> 244 if read is False:
> --> 245 raise six.reraise(type(error), error, _stacktrace)
> 246 elif read is not None:
>
> C:\Anaconda3\lib\site-packages\requests\packages\urllib3\packages\six.py
> in reraise(tp, value, tb)
> 308 if value.__traceback__ is not tb:
> --> 309 raise value.with_traceback(tb)
> 310 raise value
>
> C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py
> in urlopen(self, method, url, body, headers, retries, redirect,
> assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
> 558 timeout=timeout_obj,
> --> 559 body=body, headers=headers)
> 560
>
> C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py
> in _make_request(self, conn, method, url, timeout,
> **httplib_request_kw)
> 377 except TypeError: # Python 2.6 and older
> --> 378 httplib_response = conn.getresponse()
> 379 except (SocketTimeout, BaseSSLError, SocketError) as e:
>
> C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
> try:
> -> 1174 response.begin() 1175 except ConnectionError:
>
> C:\Anaconda3\lib\http\client.py in begin(self)
> 281 while True:
> --> 282 version, status, reason = self._read_status()
> 283 if status != CONTINUE:
>
> C:\Anaconda3\lib\http\client.py in _read_status(self)
> 250 # sending a valid response.
> --> 251 raise RemoteDisconnected("Remote end closed connection without"
> 252 " response")
>
> ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end
> closed connection without response',))
>
> During handling of the above exception, another exception occurred:
>
> ConnectionError Traceback (most recent call
> last) <ipython-input-16-598f53da7af3> in <module>()
> 3 import requests
> 4 url ='http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'
> ----> 5 r = requests.get(url)
> 6 r.content
>
> C:\Anaconda3\lib\site-packages\requests\api.py in get(url, params,
> **kwargs)
> 67
> 68 kwargs.setdefault('allow_redirects', True)
> ---> 69 return request('get', url, params=params, **kwargs)
> 70
> 71
>
> C:\Anaconda3\lib\site-packages\requests\api.py in request(method, url,
> **kwargs)
> 48
> 49 session = sessions.Session()
> ---> 50 response = session.request(method=method, url=url, **kwargs)
> 51 # By explicitly closing the session, we avoid leaving sockets open which
> 52 # can trigger a ResourceWarning in some cases, and look like a memory leak
>
> C:\Anaconda3\lib\site-packages\requests\sessions.py in request(self,
> method, url, params, data, headers, cookies, files, auth, timeout,
> allow_redirects, proxies, hooks, stream, verify, cert, json)
> 466 }
> 467 send_kwargs.update(settings)
> --> 468 resp = self.send(prep, **send_kwargs)
> 469
> 470 return resp
>
> C:\Anaconda3\lib\site-packages\requests\sessions.py in send(self,
> request, **kwargs)
> 574
> 575 # Send the request
> --> 576 r = adapter.send(request, **kwargs)
> 577
> 578 # Total elapsed time of the request (approximately)
>
> C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self,
> request, stream, timeout, verify, cert, proxies)
> 410
> 411 except (ProtocolError, socket.error) as err:
> --> 412 raise ConnectionError(err, request=request)
> 413
> 414 except MaxRetryError as e:
>
> ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote
> end closed connection without response',))
This worked for me:
import requests

# Present a regular browser User-Agent; some servers drop connections
# from clients that send the default python-requests identification.
browser_headers = {
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
}
r = requests.get(url, allow_redirects=True, headers=browser_headers)
Try using a requests Session:
import requests

MAX_RETRIES = 20
url ='http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'

# A Session with a retry-enabled transport adapter transparently
# re-issues the request when the connection is dropped by the server.
retrying_adapter = requests.adapters.HTTPAdapter(max_retries=MAX_RETRIES)
session = requests.Session()
for scheme in ('https://', 'http://'):
    session.mount(scheme, retrying_adapter)

r = session.get(url)
print(r.content)
My symptoms were very similar:
The Splunk HEC library that was giving off errors like:
requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
and when going thru flask it looked like
{
"message": "('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
}
This answer worked for me, while the addition of timeouts and retries with the HTTP Adapter did not help.
https://community.splunk.com/t5/Getting-Data-In/HEC-Error-Connection-closed-by-peer/m-p/473363/highlight/true#M81317
https://stackoverflow.com/a/15511852/999943
Adding a close header to the request:
The `Connection: close` header is added to the actual request:
# Ask the server to close the connection after responding instead of
# keeping it alive; this avoids reusing a half-dead pooled connection.
close_header = {'Connection': 'close'}
r = requests.post(url=url, data=body, headers=close_header)
For me, the problem was HTTP mocking library. I was using HTTPretty to mock requests
on my tests.
The HTTPretty library is fairly young, and while it clearly states that it will raise UnmockedError
if any unmocked URL is requested during testing, it did not raise it when the request went through the
requests library. So, if you are using any HTTP mocking library, especially one that mocks the socket directly, you might get this error.
The error can only occur while HTTPretty is enabled; outside that scope it disappears:
# Everything between enable() and disable() has its socket layer mocked;
# an unmocked URL requested in this window surfaces as a ConnectionError
# rather than HTTPretty's own UnmockedError.
httpretty.enable()
# error might occur here
httpretty.disable()
# out of scope
This answer is written for Googlers and might not intersect with the interests of the OP.
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate Us With