Deal with renamed modules in python3
six.moves handles some of the reorganized modules. dircache is a different case: it was removed outright, since it had been deprecated for a long time and os.listdir performs fine these days.
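For background, here is a minimal sketch (not part of the commit) of the six.moves pattern this change relies on; the cookie value and URL below are purely illustrative.

# A minimal, illustrative sketch: six.moves resolves these names to
# cookielib/Cookie/urlparse on Python 2 and to
# http.cookiejar/http.cookies/urllib.parse on Python 3.
from __future__ import print_function

from six.moves import http_cookiejar
from six.moves.http_cookies import SimpleCookie
from six.moves.urllib.parse import urlparse

jar = http_cookiejar.CookieJar()                    # was cookielib.CookieJar()
cookie = SimpleCookie("sid=abc123")                 # hypothetical cookie value
part = urlparse("https://idp.example.org/sso?x=1")  # hypothetical URL
print(part.hostname, part.query)                    # idp.example.org x=1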
@@ -1,12 +1,12 @@
 import calendar
-import cookielib
+from six.moves import http_cookiejar
 import copy
 import re
 import urllib
-import urlparse
+from six.moves.urllib.parse import urlparse
 import requests
 import time
-from Cookie import SimpleCookie
+from six.moves.http_cookies import SimpleCookie
 from saml2.time_util import utc_now
 from saml2 import class_name, SAMLError
 from saml2.pack import http_form_post_message
@@ -98,7 +98,7 @@ class HTTPBase(object):
                  cert_file=None):
         self.request_args = {"allow_redirects": False}
         #self.cookies = {}
-        self.cookiejar = cookielib.CookieJar()
+        self.cookiejar = http_cookiejar.CookieJar()
 
         self.request_args["verify"] = verify
         if verify:
@@ -118,7 +118,7 @@ class HTTPBase(object):
         :param url:
         :return:
         """
-        part = urlparse.urlparse(url)
+        part = urlparse(url)
 
         #if part.port:
         #    _domain = "%s:%s" % (part.hostname, part.port)
@@ -143,12 +143,12 @@ class HTTPBase(object):
         return cookie_dict
 
     def set_cookie(self, kaka, request):
-        """Returns a cookielib.Cookie based on a set-cookie header line"""
+        """Returns a http_cookiejar.Cookie based on a set-cookie header line"""
 
         if not kaka:
             return
 
-        part = urlparse.urlparse(request.url)
+        part = urlparse(request.url)
         _domain = part.hostname
         logger.debug("%s: '%s'" % (_domain, kaka))
 
@@ -205,7 +205,7 @@ class HTTPBase(object):
             except ValueError:
                 pass
             else:
-                new_cookie = cookielib.Cookie(**std_attr)
+                new_cookie = http_cookiejar.Cookie(**std_attr)
                 self.cookiejar.set_cookie(new_cookie)
 
     def send(self, url, method="GET", **kwargs):

@@ -1,5 +1,4 @@
 from __future__ import print_function
-from dircache import listdir
 import logging
 import os
 import sys
@@ -765,7 +764,7 @@ class MetadataStore(object):
             key = args[0]
             # if library read every file in the library
             if os.path.isdir(key):
-                files = [f for f in listdir(key) if isfile(join(key, f))]
+                files = [f for f in os.listdir(key) if isfile(join(key, f))]
                 for fil in files:
                     _fil = join(key, fil)
                     _md = MetaDataFile(self.onts, self.attrc, _fil)
@@ -838,7 +837,7 @@ class MetadataStore(object):
             for key in item['metadata']:
                 # Separately handle MetaDataFile and directory
                 if MDloader == MetaDataFile and os.path.isdir(key[0]):
-                    files = [f for f in listdir(key[0]) if isfile(join(key[0], f))]
+                    files = [f for f in os.listdir(key[0]) if isfile(join(key[0], f))]
                     for fil in files:
                         _fil = join(key[0], fil)
                         _md = MetaDataFile(self.onts, self.attrc, _fil)

@@ -10,7 +10,7 @@ Bindings normally consists of three parts:
 - how to package the information
 - which protocol to use
 """
-import urlparse
+from six.moves.urllib.parse import urlparse
 import saml2
 import base64
 import urllib
@@ -129,7 +129,7 @@ def http_redirect_message(message, location, relay_state="", typ="SAMLRequest",
     else:
         string = urllib.urlencode(args)
 
-    glue_char = "&" if urlparse.urlparse(location).query else "?"
+    glue_char = "&" if urlparse(location).query else "?"
     login_url = glue_char.join([location, string])
     headers = [('Location', str(login_url))]
     body = []

@@ -1,5 +1,5 @@
 import calendar
-import urlparse
+from six.moves.urllib.parse import urlparse
 import re
 import time_util
 import struct
@@ -46,7 +46,7 @@ def valid_id(oid):
 def valid_any_uri(item):
     """very simplistic, ..."""
     try:
-        part = urlparse.urlparse(item)
+        part = urlparse(item)
     except Exception:
         raise NotValid("AnyURI")
 