Deal with renamed modules in python3
six.moves handles the reorganized standard-library modules (cookielib, urlparse and Cookie). dircache has no six.moves counterpart: it was removed from Python 3 outright after a long deprecation, and os.listdir performs fine these days, so that import is simply replaced.
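
For reference, a minimal sketch of how the six.moves aliases used in this commit resolve; the URL and cookie values are made up for illustration:

# On Python 2 these aliases point at cookielib, http.cookies' predecessor
# Cookie, and urlparse; on Python 3 at http.cookiejar, http.cookies and
# urllib.parse. The API is the same either way.
from six.moves import http_cookiejar
from six.moves.http_cookies import SimpleCookie
from six.moves.urllib.parse import urlparse

jar = http_cookiejar.CookieJar()          # same class on both major versions
part = urlparse("https://example.org/sso?SAMLRequest=abc")
assert part.hostname == "example.org"
assert part.query == "SAMLRequest=abc"
cookie = SimpleCookie()                   # parses a raw cookie string
cookie.load("session=deadbeef; Path=/")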
@@ -1,12 +1,12 @@
 import calendar
-import cookielib
+from six.moves import http_cookiejar
 import copy
 import re
 import urllib
-import urlparse
+from six.moves.urllib.parse import urlparse
 import requests
 import time
-from Cookie import SimpleCookie
+from six.moves.http_cookies import SimpleCookie
 from saml2.time_util import utc_now
 from saml2 import class_name, SAMLError
 from saml2.pack import http_form_post_message
@@ -98,7 +98,7 @@ class HTTPBase(object):
                  cert_file=None):
         self.request_args = {"allow_redirects": False}
         #self.cookies = {}
-        self.cookiejar = cookielib.CookieJar()
+        self.cookiejar = http_cookiejar.CookieJar()
 
         self.request_args["verify"] = verify
         if verify:
@@ -118,7 +118,7 @@ class HTTPBase(object):
         :param url:
         :return:
         """
-        part = urlparse.urlparse(url)
+        part = urlparse(url)
 
         #if part.port:
         #    _domain = "%s:%s" % (part.hostname, part.port)
@@ -143,12 +143,12 @@ class HTTPBase(object):
         return cookie_dict
 
     def set_cookie(self, kaka, request):
-        """Returns a cookielib.Cookie based on a set-cookie header line"""
+        """Returns a http_cookiejar.Cookie based on a set-cookie header line"""
 
         if not kaka:
             return
 
-        part = urlparse.urlparse(request.url)
+        part = urlparse(request.url)
         _domain = part.hostname
         logger.debug("%s: '%s'" % (_domain, kaka))
 
@@ -205,7 +205,7 @@ class HTTPBase(object):
                 except ValueError:
                     pass
             else:
-                new_cookie = cookielib.Cookie(**std_attr)
+                new_cookie = http_cookiejar.Cookie(**std_attr)
                 self.cookiejar.set_cookie(new_cookie)
 
     def send(self, url, method="GET", **kwargs):

@@ -1,5 +1,4 @@
 from __future__ import print_function
-from dircache import listdir
 import logging
 import os
 import sys
@@ -765,7 +764,7 @@ class MetadataStore(object):
             key = args[0]
             # if library read every file in the library
             if os.path.isdir(key):
-                files = [f for f in listdir(key) if isfile(join(key, f))]
+                files = [f for f in os.listdir(key) if isfile(join(key, f))]
                 for fil in files:
                     _fil = join(key, fil)
                     _md = MetaDataFile(self.onts, self.attrc, _fil)
@@ -838,7 +837,7 @@ class MetadataStore(object):
                 for key in item['metadata']:
                     # Separately handle MetaDataFile and directory
                     if MDloader == MetaDataFile and os.path.isdir(key[0]):
-                        files = [f for f in listdir(key[0]) if isfile(join(key[0], f))]
+                        files = [f for f in os.listdir(key[0]) if isfile(join(key[0], f))]
                         for fil in files:
                             _fil = join(key[0], fil)
                             _md = MetaDataFile(self.onts, self.attrc, _fil)

@@ -10,7 +10,7 @@ Bindings normally consists of three parts:
 - how to package the information
 - which protocol to use
 """
-import urlparse
+from six.moves.urllib.parse import urlparse
 import saml2
 import base64
 import urllib
@@ -129,7 +129,7 @@ def http_redirect_message(message, location, relay_state="", typ="SAMLRequest",
     else:
         string = urllib.urlencode(args)
 
-    glue_char = "&" if urlparse.urlparse(location).query else "?"
+    glue_char = "&" if urlparse(location).query else "?"
     login_url = glue_char.join([location, string])
     headers = [('Location', str(login_url))]
     body = []

@@ -1,5 +1,5 @@
 import calendar
-import urlparse
+from six.moves.urllib.parse import urlparse
 import re
 import time_util
 import struct
@@ -46,7 +46,7 @@ def valid_id(oid):
 def valid_any_uri(item):
     """very simplistic, ..."""
     try:
-        part = urlparse.urlparse(item)
+        part = urlparse(item)
     except Exception:
         raise NotValid("AnyURI")
 
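
The dircache change is a drop-in swap rather than a six.moves rename. A minimal sketch of the listing pattern the MetadataStore hunks use; the helper name is hypothetical:

import os
from os.path import isfile, join

def list_plain_files(directory):
    # dircache.listdir returned a cached directory listing; os.listdir exists
    # on both Python 2 and 3 and is fast enough that the cache is moot.
    return [f for f in os.listdir(directory) if isfile(join(directory, f))]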