
Python urllib.getproxies Function Code Examples


This article collects typical usage examples of Python's urllib.getproxies function. If you are wondering how getproxies is used in practice, or are looking for concrete examples of calling it, the curated code samples below may help.



A total of 20 code examples of the getproxies function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
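
Before the project examples, here is a minimal, self-contained sketch of the call itself. It is not taken from any of the projects below; the proxy URL in the comment is purely illustrative, and the Python 2/3 import fallback mirrors the pattern used in several of the later examples.

try:
    from urllib import getproxies          # Python 2
except ImportError:
    from urllib.request import getproxies  # Python 3

# getproxies() returns a dict mapping scheme to proxy URL, read from
# environment variables such as http_proxy or, on macOS/Windows, from the
# system proxy configuration, e.g. {"http": "http://proxy.example.com:8080"}.
proxies = getproxies()
print(proxies.get("http", ""))  # empty string when no HTTP proxy is configured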

Example 1: getAutoProxy

 def getAutoProxy(self):
     """Fetch the proxy from the the system environment variables
     """
     if urllib.getproxies().has_key('http'):
         return urllib.getproxies()['http']
     else:
         return ""
Developer ID: bjanus, Project: psychopy, Lines of code: 7, Source file: preferences.py
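
The snippet above is Python 2 code: dict.has_key() was removed in Python 3, and getproxies now lives in urllib.request. A rough Python 3 adaptation of the same method might look like the following sketch (the method name and the empty-string fallback are kept from the example above; this is not part of the psychopy source):

 def getAutoProxy(self):
     """Fetch the proxy from the system environment variables."""
     import urllib.request
     # dict.get() replaces the Python 2 has_key() check and avoids calling
     # getproxies() twice; fall back to an empty string when no proxy is set.
     return urllib.request.getproxies().get('http', "")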


Example 2: __get_proxies

 def __get_proxies(self):
     try:
         mm_http_proxy = config.connection.get_plugin_client_setting('mm_http_proxy', None)
         mm_http_proxy = mm_http_proxy.replace("\\","")
         mm_https_proxy = config.connection.get_plugin_client_setting('mm_https_proxy', None)
         mm_https_proxy = mm_https_proxy.replace("\\","")
         if mm_https_proxy == None and mm_http_proxy != None:
             mm_https_proxy = mm_http_proxy
         if mm_http_proxy != None and mm_https_proxy != None:
             return {
                 "http": mm_http_proxy,
                 "https": mm_https_proxy
             }
         elif mm_http_proxy != None:
             return {
                 "http": mm_http_proxy
             }
         elif mm_https_proxy != None:
             return {
                 "https": mm_https_proxy
             }
         else:
             return urllib.getproxies()
     except:
         return urllib.getproxies()             
Developer ID: azam, Project: mm, Lines of code: 25, Source file: sfdc_client.py


Example 3: start_schedulers

def start_schedulers(options):
    try:
        from multiprocessing import Process
    except:
        sys.stderr.write('Sorry, -K only supported for python 2.6-2.7\n')
        return
    processes = []
    apps = [(app.strip(), None) for app in options.scheduler.split(',')]
    if options.scheduler_groups:
        apps = options.scheduler_groups
    code = "from gluon.globals import current;current._scheduler.loop()"
    logging.getLogger().setLevel(options.debuglevel)
    if options.folder:
        os.chdir(options.folder)
    if len(apps) == 1 and not options.with_scheduler:
        app_, code = get_code_for_scheduler(apps[0], options)
        if not app_:
            return
        print('starting single-scheduler for "%s"...' % app_)
        run(app_, True, True, None, False, code)
        return

    # Work around OS X problem: http://bugs.python.org/issue9405
    if PY2:
        import urllib
    else:
        import urllib.request as urllib
    urllib.getproxies()

    for app in apps:
        app_, code = get_code_for_scheduler(app, options)
        if not app_:
            continue
        print('starting scheduler for "%s"...' % app_)
        args = (app_, True, True, None, False, code)
        p = Process(target=run, args=args)
        processes.append(p)
        print("Currently running %s scheduler processes" % (len(processes)))
        p.start()
        ##to avoid bashing the db at the same time
        time.sleep(0.7)
        print("Processes started")
    for p in processes:
        try:
            p.join()
        except (KeyboardInterrupt, SystemExit):
            print("Processes stopped")
        except:
            p.terminate()
            p.join()
Developer ID: zcomx, Project: zcomix.com, Lines of code: 50, Source file: widget.py


Example 4: __init__

    def __init__(self):
        self.proxies = {}
        for type, url in getproxies().items():
            self.proxies[type] = self._get_proxy(url, type)

        if not self.proxies:
            raise NotConfigured
Developer ID: pyarnold, Project: scrapy, Lines of code: 7, Source file: httpproxy.py


Example 5: __init__

    def __init__(self, radiodns_services):
        self._radiodns_services = radiodns_services
        self._dns = DnsResolver()

        self._proxy_settings = None
        self._use_http_proxy = False

        self._radiovis_client = None

        self._http_client = HttpClientThread(self)
        self._http_client.start()

        self._listeners = []

        # Get system proxy server settings (from http_proxy environment variable
        # or web browser settings).
        proxies = urllib.getproxies()
        if "http" in proxies:
            http_proxy = urlparse.urlparse(proxies['http'])
            self._proxy_settings = ProxySettings(proxy_type = socks.PROXY_TYPE_HTTP,
                                                 host = http_proxy.hostname,
                                                 port = http_proxy.port)

            self.log("HTTP proxy: " + http_proxy.hostname +
                     ", port " + str(http_proxy.port))
        else:
            self._proxy_settings = None
Developer ID: BantouTV, Project: RadioVisDemo, Lines of code: 27, Source file: connection_manager.py


Example 6: get_soap_client

def get_soap_client(wsdlurl):  # pragma: no cover (no tests for this function)
    """Get a SOAP client for performing requests."""
    # this function isn't automatically tested because the functions using
    # it are not automatically tested
    try:
        from urllib import getproxies
    except ImportError:
        from urllib.request import getproxies
    # try suds first
    try:
        from suds.client import Client
        return Client(wsdlurl, proxy=getproxies()).service
    except ImportError:
        # fall back to using pysimplesoap
        from pysimplesoap.client import SoapClient
        return SoapClient(wsdl=wsdlurl, proxy=getproxies())
Developer ID: vbastos, Project: python-stdnum, Lines of code: 16, Source file: util.py


Example 7: GetDefaultProxyInfo

def GetDefaultProxyInfo(method='http'):
  """Get ProxyInfo from environment.

  This function is meant to mimic httplib2.proxy_info_from_environment, but get
  the proxy information from urllib.getproxies instead. urllib can also get
  proxy information from Windows Internet Explorer settings or MacOSX framework
  SystemConfiguration.

  Args:
    method: protocol string
  Returns:
    httplib2 ProxyInfo object or None
  """

  proxy_dict = urllib.getproxies()
  proxy_url = proxy_dict.get(method, None)
  if not proxy_url:
    return None

  pi = httplib2.proxy_info_from_url(proxy_url, method)

  # The ProxyInfo object has a bypass_host method that takes the hostname as an
  # argument and it returns 1 or 0 based on if the hostname should bypass the
  # proxy or not. We could either build the bypassed hosts list and pass it to
  # pi.bypass_hosts, or we can just replace the method with the function in
  # urllib, and completely mimic urllib logic. We do the latter.
  # Since the urllib.proxy_bypass _function_ (no self arg) is not "bound" to the
  # class instance, it doesn't receive the self arg when its called. We don't
  # need to "bind" it via types.MethodType(urllib.proxy_bypass, pi).
  pi.bypass_host = urllib.proxy_bypass

  return pi
Developer ID: hbrucejohnson, Project: tt, Lines of code: 32, Source file: http_proxy.py


Example 8: http_proxy

    def http_proxy(self):
        """
        Retrieves the operating system http proxy.

        First, the method scans the environment for variables named http_proxy, in case insensitive way.
        If both lowercase and uppercase environment variables exist (and disagree), lowercase is preferred.

        When the method cannot find such environment variables:
        - for Mac OS X, it will look for proxy information from Mac OS X System Configuration,
        - for Windows, it will look for proxy information from Windows Systems Registry.

        .. note:: There is a restriction when looking for proxy information from
                  Mac OS X System Configuration or Windows Systems Registry:
                  in these cases, the Toolkit does not support the use of proxies
                  which require authentication (username and password).
        """
        # Get the dictionary of scheme to proxy server URL mappings; for example:
        #     {"http": "http://foo:[email protected]:80", "https": "http://74.50.63.111:443"}
        # "getproxies" scans the environment for variables named <scheme>_proxy, in case insensitive way.
        # When it cannot find it, for Mac OS X it looks for proxy information from Mac OSX System Configuration,
        # and for Windows it looks for proxy information from Windows Systems Registry.
        # If both lowercase and uppercase environment variables exist (and disagree), lowercase is preferred.
        # Note the following restriction: "getproxies" does not support the use of proxies which
        # require authentication (user and password) when looking for proxy information from
        # Mac OSX System Configuration or Windows Systems Registry.
        system_proxies = urllib.getproxies()

        # Get the http proxy when it exists in the dictionary.
        proxy = system_proxies.get("http")

        if proxy:
            # Remove any spurious "http://" from the http proxy string.
            proxy = proxy.replace("http://", "", 1)

        return proxy
Developer ID: adriankrupa, Project: tk-core, Lines of code: 35, Source file: system_settings.py


Example 9: get_environ_proxies

def get_environ_proxies(netloc):
    """Return a dict of environment proxies."""

    get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy = get_proxy('no_proxy')

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the netloc, both with and without the port.
        no_proxy = no_proxy.replace(' ', '').split(',')

        for host in no_proxy:
            if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
                # The URL does match something in no_proxy, so we don't want
                # to apply the proxies on this URL.
                return {}

    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    if proxy_bypass(netloc):
        return {}

    # If we get here, we either didn't have no_proxy set or we're not going
    # anywhere that no_proxy applies to, and the system settings don't require
    # bypassing the proxy for the current URL.
    return getproxies()
Developer ID: chitrangjain, Project: python-swiftclient, Lines of code: 29, Source file: utils.py


Example 10: urlopen

def urlopen(url, proxies=None, data=None):
    """
    Return connected request object for given url.
    All errors raise exceptions.
    """
    global _opener
    if proxies is None:
        proxies = urllib.getproxies()
    headers = {
       'User-Agent': UA_STR,
       'Accept-Encoding' : 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5',
    }
    request = urllib2.Request(url, data, headers)
    proxy_support = urllib2.ProxyHandler(proxies)
    if _opener is None:
        # XXX heh, not really protected :)
        pwd_manager = PasswordManager("WebCleaner", "imadoofus")
        handlers = [proxy_support,
            urllib2.UnknownHandler,
            HttpWithGzipHandler,
            urllib2.HTTPBasicAuthHandler(pwd_manager),
            urllib2.ProxyBasicAuthHandler(pwd_manager),
            urllib2.HTTPDigestAuthHandler(pwd_manager),
            urllib2.ProxyDigestAuthHandler(pwd_manager),
            urllib2.HTTPDefaultErrorHandler,
            urllib2.HTTPRedirectHandler,
        ]
        if hasattr(httplib, 'HTTPS'):
            handlers.append(HttpsWithGzipHandler)
        _opener = urllib2.build_opener(*handlers)
        # print _opener.handlers
        urllib2.install_opener(_opener)
    return _opener.open(request)
Developer ID: HomeRad, Project: TorCleaner, Lines of code: 33, Source file: update.py


Example 11: __init__

 def __init__(self, proxies=None):
     if proxies is None:
         proxies = getproxies()
     assert hasattr(proxies, "has_key"), "proxies must be a mapping"
     self.proxies = proxies
     for type, url in proxies.items():
         setattr(self, "%s_open" % type, lambda r, proxy=url, type=type, meth=self.proxy_open: meth(r, proxy, type))
Developer ID: BackupTheBerlios, Project: pyasynchio-svn, Lines of code: 7, Source file: urllib2.py


Example 12: using_http_proxy

def using_http_proxy(url):
    """
    Return True if the url will use HTTP proxy.
    Returns False otherwise.
    """
    up = urlparse(url)
    return up.scheme.lower() in getproxies() and not proxy_bypass(up.netloc)
Developer ID: Hopebaytech, Project: swift-bench, Lines of code: 7, Source file: utils.py


Example 13: connect

    def connect(self):
        #- Parse proxies
        pd = urllib.getproxies().get('http', None)
        if pd is None:
            sockstype = ''
        else:
            sockstype, user, password, hostport = urllib2._parse_proxy(pd)

        if 'socks' not in sockstype:
            return httplib.HTTPConnection.connect(self)

        assert ':' in hostport # if we don't have a port we're screwed
        host, port = hostport.rsplit(':', 1)
        port = int(port)


        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):

            af, socktype, proto, canonname, sa = res
            try:
                self.sock = self._sockettype(af, socktype, proto)
                self.sock.setproxy(proxytype=getattr(socks, 'PROXY_TYPE_%s' % sockstype.upper()), addr=host, port=port, rdns=False, username=user, password=password)
                #- The rest is the same as superclass

                if self.debuglevel > 0:
                    print "connect: (%s, %s)" % (self.host, self.port)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.debuglevel > 0:
                    print 'connect fail:', (self.host, self.port)
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
Developer ID: AlexUlrich, Project: digsby, Lines of code: 35, Source file: net.py


Example 14: do_upload_and_exit

def do_upload_and_exit(path, url, proxy):

    f = open(path, 'rb')

    # mmap the file to reduce the amount of memory required (see bit.ly/2aNENXC)
    filedata = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)

    # Get proxies from environment/system
    proxy_handler = urllib2.ProxyHandler(urllib.getproxies())
    if proxy != "":
        # unless a proxy is explicitly passed, then use that instead
        proxy_handler = urllib2.ProxyHandler({'https': proxy, 'http': proxy})

    opener = urllib2.build_opener(proxy_handler)
    request = urllib2.Request(url.encode('utf-8'), data=filedata)
    request.add_header(str('Content-Type'), str('application/zip'))
    request.get_method = lambda: str('PUT')

    exit_code = 0
    try:
        url = opener.open(request)
        if url.getcode() == 200:
            log('Done uploading')
        else:
            raise Exception('Error uploading, expected status code 200, got status code: {0}'.format(url.getcode()))
    except Exception as e:
        log(traceback.format_exc())
        exit_code = 1

    filedata.close()
    f.close()

    sys.exit(exit_code)
Developer ID: couchbase, Project: sync_gateway, Lines of code: 33, Source file: tasks.py


Example 15: _http_get

def _http_get(uri, silent=False):
    if PYTHON3:
        opener = urllib2.build_opener(urllib2.ProxyHandler(urllib.request.getproxies()))
    else:
        opener = urllib2.build_opener(urllib2.ProxyHandler(urllib.getproxies()))
    for repo in repos:
        if 'storage.jcloud.com' in repo:
            _uri = uri
            for p in ('/', 'dev', 'master', 'update', 'plugins'):
                _uri = _uri.lstrip(p).lstrip('/')
            url = repo + '/' + _uri
        else:
            url = repo + '/raw/' + uri
        try:
            resp = opener.open(urllib2.Request(url, headers=headers), timeout = 15)
            body = resp.read()
            try:
                f = StringIO(body)
                gz = gzip.GzipFile(fileobj = f)
                body = gz.read()
            except:
                pass
        except urllib2.HTTPError as e:
            if not silent:
                print('HTTP Error %s when fetching %s' % (e.code, url))
        except urllib2.URLError as e:
            pass
        else:
            return body
Developer ID: ChizuruAmamiya, Project: MAClient, Lines of code: 29, Source file: plugin_update.py


Example 16: urlopen

def urlopen(url, proxies=None, data=None):
    global _opener

    if not proxies:
        proxies = urllib.getproxies()

    headers = {'User-Agent': UA_STR,
               'Accept-Encoding' : 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5'}
    
    req = urllib2.Request(url, data, headers)

    proxy_support = urllib2.ProxyHandler(proxies)
    if _opener is None:
        pwd_manager = handlepasswd()
        handlers = [proxy_support,
            urllib2.UnknownHandler, HttpWithGzipHandler,
            urllib2.HTTPBasicAuthHandler(pwd_manager),
            urllib2.ProxyBasicAuthHandler(pwd_manager),
            urllib2.HTTPDigestAuthHandler(pwd_manager),
            urllib2.ProxyDigestAuthHandler(pwd_manager),
            urllib2.HTTPDefaultErrorHandler, urllib2.HTTPRedirectHandler,
        ]
        if hasattr(httplib, 'HTTPS'):
            handlers.append(HttpsWithGzipHandler)
        _opener = urllib2.build_opener(*handlers)
        # print _opener.handlers
        urllib2.install_opener(_opener)
    
    return _opener.open(req)
Developer ID: ideamonk, Project: apt-offline, Lines of code: 29, Source file: AptOffline_urlutils.py


Example 17: request_connection

    def request_connection(self):

        """If we are not behind a proxy, create the connection once and reuse
        it for all requests. If we are behind a proxy, we need to revert to
        HTTP 1.0 and use a separate connection for each request.

        """

        # If we haven't done so, determine whether we're behind a proxy.
        if self.behind_proxy is None:
            import urllib
            proxies = urllib.getproxies()
            if "http" in proxies:
                self.behind_proxy = True
                self.proxy = proxies["http"]
            else:
                self.behind_proxy = False
        # Create a new connection or reuse an existing one.
        if self.behind_proxy:
            httplib.HTTPConnection._http_vsn = 10
            httplib.HTTPConnection._http_vsn_str = "HTTP/1.0"
            if self.proxy is not None:
                self.con = httplib.HTTPConnection(self.proxy, self.port)
            else:  # Testsuite has set self.behind_proxy to True to simulate
                # being behind a proxy.
                self.con = httplib.HTTPConnection(self.server, self.port)
        else:
            httplib.HTTPConnection._http_vsn = 11
            httplib.HTTPConnection._http_vsn_str = "HTTP/1.1"
            if not self.con:
                self.con = httplib.HTTPConnection(self.server, self.port)
Developer ID: tbabej, Project: mnemosyne, Lines of code: 31, Source file: client.py


Example 18: fetch_proxies

    def fetch_proxies(self):
        prefs = self.prefs
        try:
            mode = prefs['proxy_mode']
        except Exception:
            TRACE("Couldn't load proxy info from preferences")
            unhandled_exc_handler()
            return None

        if mode == NO_PROXY:
            return None
        if mode == AUTO_PROXY:
            info = urllib.getproxies()
            if info and 'http' in info:
                try:
                    parsed = urlparse.urlparse(info['http'])
                    _type, hostname, port = socks.PROXY_TYPE_HTTP, parsed.hostname, parsed.port
                except Exception:
                    unhandled_exc_handler()
                    return None

            elif info and 'socks' in info:
                _type = socks.PROXY_TYPE_SOCKS4
                try:
                    split = info['socks'].split('//')[1].split(':', 1)
                    if len(split) == 1:
                        hostname = split[0]
                        port = 1080
                    else:
                        hostname, port = split
                        port = int(port)
                except (ValueError, IndexError):
                    return None

            else:
                return None
            return ProxyInfo(_type, hostname, port)
        if prefs['proxy_type'] == SOCKS4:
            args = [socks.PROXY_TYPE_SOCKS4]
        elif prefs['proxy_type'] == SOCKS5:
            args = [socks.PROXY_TYPE_SOCKS5]
        else:
            args = [socks.PROXY_TYPE_HTTP]
        try:

            def tsiu(a):
                if type(a) is unicode:
                    return a.encode('utf-8')
                return a

            args += [tsiu(prefs['proxy_server']), int(prefs['proxy_port'])]
            opt_args = {}
            if prefs['proxy_requires_auth']:
                opt_args['proxy_user'] = tsiu(prefs['proxy_username'])
                opt_args['proxy_pass'] = tsiu(prefs['proxy_password'])
            return ProxyInfo(*args, **opt_args)
        except Exception:
            unhandled_exc_handler()
            return None
Developer ID: bizonix, Project: DropBoxLibrarySRC, Lines of code: 59, Source file: proxy_info.py


Example 19: __init__

 def __init__(self):
     self._log = logging.getLogger(self.__class__.__name__)
     # Parent class will disable the middleware when no proxy
     # is configured in the mining node (raises NotConfigured exception)
     # Just copied the constructor code and removed the exception
     self.proxies = {}
     for key, url in getproxies().items():
         self.proxies[key] = self._get_proxy(url, key)
Developer ID: deepak64, Project: scraper-test, Lines of code: 8, Source file: middlewares.py


Example 20: start_schedulers

def start_schedulers(options):
    try:
        from multiprocessing import Process
    except:
        sys.stderr.write('Sorry, -K only supported for Python 2.6+\n')
        return
    logging.getLogger().setLevel(options.log_level)

    apps = [ag.split(':') for ag in options.schedulers]
    if not options.with_scheduler and len(apps) == 1:
        app, code = get_code_for_scheduler(apps[0], options)
        if not app:
            return
        print('starting single-scheduler for "%s"...' % app)
        run(app, True, True, None, False, code, False, True)
        return

    # Work around OS X problem: http://bugs.python.org/issue9405
    if PY2:
        import urllib
    else:
        import urllib.request as urllib
    urllib.getproxies()

    processes = []
    for app_groups in apps:
        app, code = get_code_for_scheduler(app_groups, options)
        if not app:
            continue
        print('starting scheduler for "%s"...' % app)
        args = (app, True, True, None, False, code, False, True)
        p = Process(target=run, args=args)
        processes.append(p)
        print("Currently running %s scheduler processes" % (len(processes)))
        p.start()
        ##to avoid bashing the db at the same time
        time.sleep(0.7)
        print("Processes started")
    for p in processes:
        try:
            p.join()
        except (KeyboardInterrupt, SystemExit):
            print("Processes stopped")
        except:
            p.terminate()
            p.join()
Developer ID: web2py, Project: web2py, Lines of code: 46, Source file: widget.py



Note: The urllib.getproxies examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors, and copyright remains with the original authors; consult each project's license before redistributing or using the code. Do not reproduce this article without permission.

