Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- https://<user>:<pass>@xecdapi.xe.com/v1/convert_from.json/?from=1000000&to=SGD&amount=AED,AUD,BDT&inverse=True
- yield scrapy.Request("https://<user>:<pass>@xecdapi.xe.com/v1/convert_from.json/?from=1000000&to=SGD&amount=AED,AUD,BDT&inverse=True")
- Making that request raises the following error:
- Traceback (most recent call last):
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedinternetdefer.py", line 1297, in _inlineCallbacks
- result = result.throwExceptionIntoGenerator(g)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedpythonfailure.py", line 389, in throwExceptionIntoGenerator
- return g.throw(self.type, self.value, self.tb)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapycoredownloadermiddleware.py", line 43, in process_request
- defer.returnValue((yield download_func(request=request,spider=spider)))
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapyutilsdefer.py", line 45, in mustbe_deferred
- result = f(*args, **kw)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapycoredownloaderhandlers__init__.py", line 65, in download_request
- return handler.download_request(request, spider)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapycoredownloaderhandlershttp11.py", line 61, in download_request
- return agent.download_request(request)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapycoredownloaderhandlershttp11.py", line 286, in download_request
- method, to_bytes(url, encoding='ascii'), headers, bodyproducer)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedwebclient.py", line 1596, in request
- endpoint = self._getEndpoint(parsedURI)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedwebclient.py", line 1580, in _getEndpoint
- return self._endpointFactory.endpointForURI(uri)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedwebclient.py", line 1456, in endpointForURI
- uri.port)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesscrapycoredownloadercontextfactory.py", line 59, in creatorForNetloc
- return ScrapyClientTLSOptions(hostname.decode("ascii"), self.getContext())
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedinternet_sslverify.py", line 1201, in __init__
- self._hostnameBytes = _idnaBytes(hostname)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagestwistedinternet_sslverify.py", line 87, in _idnaBytes
- return idna.encode(text)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesidnacore.py", line 355, in encode
- result.append(alabel(label))
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesidnacore.py", line 276, in alabel
- check_label(label)
- File "d:kerjahitpython~1<project_name><project_name>libsite-packagesidnacore.py", line 253, in check_label
- raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
- InvalidCodepoint: Codepoint U+003A at position 28 of u'xxxxxxxxxxxxxxxxxxxxxxxxxxxx:xxxxxxxxxxxxxxxxxxxxxxxxxxx@xecdapi' not allowed
# Enable Scrapy's HTTP Basic-Auth middleware so credentials can be supplied
# via spider attributes (http_user / http_pass) instead of being embedded in
# the request URL — a user:pass@host URL fails IDNA hostname validation
# during the TLS handshake, which is exactly the InvalidCodepoint traceback
# shown above (U+003A is the ':' separating user from password).
#
# NOTE(review): HttpAuthMiddleware ships enabled by default at priority 300;
# this override re-registers it at 811 — confirm the non-default priority
# is intentional.
DOWNLOADER_MIDDLEWARES = {
    'scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware': 811,
}
from scrapy.spiders import CrawlSpider


class SomeIntranetSiteSpider(CrawlSpider):
    """Spider that authenticates via HTTP Basic Auth.

    Setting the class attributes ``http_user`` and ``http_pass`` lets
    Scrapy's HttpAuthMiddleware (see DOWNLOADER_MIDDLEWARES in the
    project settings) attach the Authorization header to every request,
    so credentials never appear inside the URL itself.  Embedding them
    as ``user:pass@host`` breaks TLS hostname/IDNA validation, which is
    the InvalidCodepoint error quoted in the traceback above.
    """

    # Credentials consumed by
    # scrapy.downloadermiddlewares.httpauth.HttpAuthMiddleware.
    http_user = 'someuser'
    http_pass = 'somepass'
    name = 'intranet.example.com'
    # .. rest of the spider code omitted ...
Add Comment
Please sign in to add a comment.