From 7b70f435e0d6e8e9b85ad82bdfddbd30416b02f0 Mon Sep 17 00:00:00 2001
From: Joe Gregorio
Date: Wed, 9 Nov 2011 10:18:51 -0500
Subject: [PATCH] Clean up samples and remove Buzz references.

---
 apiclient/contrib/buzz/__init__.py | 0
 apiclient/contrib/buzz/future.json | 142 -
 contrib/__init__.py | 0
 contrib/buzz/__init__.py | 0
 contrib_tests/__init__.py | 0
 contrib_tests/buzz/__init__.py | 0
 docs/dyn/adexchangebuyer.v1.accounts.html | 47 -
 docs/dyn/adexchangebuyer.v1.html | 28 -
 gflags.py | 2712 ------------
 gflags_validators.py | 187 -
 httplib2/__init__.py | 1513 -------
 httplib2/cacerts.txt | 714 ----
 httplib2/iri2uri.py | 110 -
 httplib2/socks.py | 438 --
 oauth2/__init__.py | 735 ----
 oauth2/clients/__init__.py | 0
 oauth2/clients/imap.py | 40 -
 oauth2/clients/smtp.py | 41 -
 samples/appengine/app.yaml | 12 +-
 .../client_secrets.json | 0
 .../grant.html | 2 +-
 samples/appengine/main.py | 118 +-
 samples/appengine/simplejson | 1 -
 samples/appengine/static/go.png | Bin 1139 -> 0 bytes
 samples/appengine/welcome.html | 25 +-
 samples/appengine_with_decorator/apiclient | 1 -
 samples/appengine_with_decorator/app.yaml | 12 -
 samples/appengine_with_decorator/better.py | 92 -
 samples/appengine_with_decorator/gflags.py | 1 -
 .../gflags_validators.py | 1 -
 samples/appengine_with_decorator/httplib2 | 1 -
 samples/appengine_with_decorator/main.py | 70 -
 samples/appengine_with_decorator/oauth2 | 1 -
 samples/appengine_with_decorator/oauth2client | 1 -
 samples/appengine_with_decorator/uritemplate | 1 -
 samples/appengine_with_decorator2/apiclient | 1 -
 samples/appengine_with_decorator2/app.yaml | 12 -
 samples/appengine_with_decorator2/gflags.py | 1 -
 .../gflags_validators.py | 1 -
 samples/appengine_with_decorator2/httplib2 | 1 -
 samples/appengine_with_decorator2/index.yaml | 11 -
 samples/appengine_with_decorator2/main.py | 111 -
 samples/appengine_with_decorator2/oauth2 | 1 -
 .../appengine_with_decorator2/oauth2client | 1 -
 samples/appengine_with_decorator2/uritemplate | 1 -
 .../appengine_with_decorator2/welcome.html | 8 -
 samples/{oauth2 => }/dailymotion/app.yaml | 0
 samples/{oauth2 => }/dailymotion/gflags.py | 0
 .../dailymotion/gflags_validators.py | 0
 samples/{oauth2 => }/dailymotion/httplib2 | 0
 .../index.yaml | 0
 samples/{oauth2 => }/dailymotion/main.py | 0
 samples/{oauth2 => }/dailymotion/oauth2client | 0
 samples/{oauth2 => }/dailymotion/simplejson | 0
 samples/{oauth2 => }/dailymotion/uritemplate | 0
 samples/{oauth2 => }/dailymotion/welcome.html | 0
 samples/debugging/main.py | 45 -
 samples/local/main.py | 37 -
 samples/localdiscovery/buzz.json | 3727 -----------------
 samples/localdiscovery/buzz.py | 37 -
 samples/new_project_template/apiclient | 1 -
 samples/new_project_template/app.yaml | 12 -
 samples/new_project_template/gflags.py | 1 -
 .../new_project_template/gflags_validators.py | 1 -
 samples/new_project_template/httplib2 | 1 -
 samples/new_project_template/index.yaml | 11 -
 samples/new_project_template/main.py | 131 -
 samples/new_project_template/oauth2 | 1 -
 samples/new_project_template/oauth2client | 1 -
 samples/new_project_template/uritemplate | 1 -
 samples/new_project_template/welcome.html | 8 -
 samples/oauth2/dailymotion/index.yaml | 11 -
 samples/oauth2/django_sample/__init__.py | 0
 samples/oauth2/django_sample/buzz/__init__.py | 0
 samples/oauth2/django_sample/buzz/models.py | 33 -
 samples/oauth2/django_sample/buzz/tests.py | 24 -
 samples/oauth2/django_sample/buzz/views.py | 60 -
 samples/oauth2/django_sample/manage.py | 15 -
 samples/oauth2/django_sample/settings.py | 83 -
samples/oauth2/django_sample/static/go.png | Bin 1139 -> 0 bytes .../django_sample/templates/buzz/login.html | 23 - .../django_sample/templates/buzz/welcome.html | 33 - samples/oauth2/django_sample/urls.py | 25 - simplejson/.__speedups.c | Bin 188 -> 0 bytes simplejson/._decoder.py | Bin 187 -> 0 bytes simplejson/._encoder.py | Bin 186 -> 0 bytes simplejson/._ordered_dict.py | Bin 184 -> 0 bytes simplejson/._scanner.py | Bin 186 -> 0 bytes simplejson/._tool.py | Bin 187 -> 0 bytes simplejson/__init__.py | 437 -- simplejson/_speedups.c | 2561 ----------- simplejson/decoder.py | 421 -- simplejson/encoder.py | 501 --- simplejson/ordered_dict.py | 119 - simplejson/scanner.py | 77 - simplejson/tests/.___init__.py | Bin 184 -> 0 bytes simplejson/tests/._test_decimal.py | Bin 187 -> 0 bytes simplejson/tests/._test_decode.py | Bin 184 -> 0 bytes simplejson/tests/._test_default.py | Bin 184 -> 0 bytes simplejson/tests/._test_dump.py | Bin 184 -> 0 bytes .../tests/._test_encode_basestring_ascii.py | Bin 184 -> 0 bytes simplejson/tests/._test_float.py | Bin 186 -> 0 bytes simplejson/tests/._test_separators.py | Bin 184 -> 0 bytes simplejson/tests/._test_unicode.py | Bin 187 -> 0 bytes simplejson/tests/__init__.py | 63 - simplejson/tests/test_check_circular.py | 30 - simplejson/tests/test_decimal.py | 33 - simplejson/tests/test_decode.py | 73 - simplejson/tests/test_default.py | 9 - simplejson/tests/test_dump.py | 27 - .../tests/test_encode_basestring_ascii.py | 41 - simplejson/tests/test_encode_for_html.py | 32 - simplejson/tests/test_fail.py | 91 - simplejson/tests/test_float.py | 19 - simplejson/tests/test_indent.py | 53 - simplejson/tests/test_pass1.py | 76 - simplejson/tests/test_pass2.py | 14 - simplejson/tests/test_pass3.py | 20 - simplejson/tests/test_recursion.py | 67 - simplejson/tests/test_scanstring.py | 117 - simplejson/tests/test_separators.py | 42 - simplejson/tests/test_speedups.py | 21 - simplejson/tests/test_unicode.py | 99 - simplejson/tool.py | 39 - tests/test_discovery.py | 84 +- tests/test_mocks.py | 59 +- 126 files changed, 114 insertions(+), 16628 deletions(-) delete mode 100644 apiclient/contrib/buzz/__init__.py delete mode 100644 apiclient/contrib/buzz/future.json delete mode 100644 contrib/__init__.py delete mode 100644 contrib/buzz/__init__.py delete mode 100644 contrib_tests/__init__.py delete mode 100644 contrib_tests/buzz/__init__.py delete mode 100644 docs/dyn/adexchangebuyer.v1.accounts.html delete mode 100644 docs/dyn/adexchangebuyer.v1.html delete mode 100644 gflags.py delete mode 100755 gflags_validators.py delete mode 100644 httplib2/__init__.py delete mode 100644 httplib2/cacerts.txt delete mode 100644 httplib2/iri2uri.py delete mode 100644 httplib2/socks.py delete mode 100644 oauth2/__init__.py delete mode 100644 oauth2/clients/__init__.py delete mode 100644 oauth2/clients/imap.py delete mode 100644 oauth2/clients/smtp.py rename samples/{appengine_with_decorator2 => appengine}/client_secrets.json (100%) rename samples/{appengine_with_decorator2 => appengine}/grant.html (83%) mode change 100755 => 100644 samples/appengine/main.py delete mode 120000 samples/appengine/simplejson delete mode 100644 samples/appengine/static/go.png delete mode 120000 samples/appengine_with_decorator/apiclient delete mode 100644 samples/appengine_with_decorator/app.yaml delete mode 100644 samples/appengine_with_decorator/better.py delete mode 120000 samples/appengine_with_decorator/gflags.py delete mode 120000 samples/appengine_with_decorator/gflags_validators.py delete mode 120000 
samples/appengine_with_decorator/httplib2 delete mode 100755 samples/appengine_with_decorator/main.py delete mode 120000 samples/appengine_with_decorator/oauth2 delete mode 120000 samples/appengine_with_decorator/oauth2client delete mode 120000 samples/appengine_with_decorator/uritemplate delete mode 120000 samples/appengine_with_decorator2/apiclient delete mode 100644 samples/appengine_with_decorator2/app.yaml delete mode 120000 samples/appengine_with_decorator2/gflags.py delete mode 120000 samples/appengine_with_decorator2/gflags_validators.py delete mode 120000 samples/appengine_with_decorator2/httplib2 delete mode 100644 samples/appengine_with_decorator2/index.yaml delete mode 100644 samples/appengine_with_decorator2/main.py delete mode 120000 samples/appengine_with_decorator2/oauth2 delete mode 120000 samples/appengine_with_decorator2/oauth2client delete mode 120000 samples/appengine_with_decorator2/uritemplate delete mode 100644 samples/appengine_with_decorator2/welcome.html rename samples/{oauth2 => }/dailymotion/app.yaml (100%) rename samples/{oauth2 => }/dailymotion/gflags.py (100%) rename samples/{oauth2 => }/dailymotion/gflags_validators.py (100%) rename samples/{oauth2 => }/dailymotion/httplib2 (100%) rename samples/{appengine_with_decorator => dailymotion}/index.yaml (100%) rename samples/{oauth2 => }/dailymotion/main.py (100%) rename samples/{oauth2 => }/dailymotion/oauth2client (100%) rename samples/{oauth2 => }/dailymotion/simplejson (100%) rename samples/{oauth2 => }/dailymotion/uritemplate (100%) rename samples/{oauth2 => }/dailymotion/welcome.html (100%) delete mode 100644 samples/debugging/main.py delete mode 100644 samples/local/main.py delete mode 100644 samples/localdiscovery/buzz.json delete mode 100644 samples/localdiscovery/buzz.py delete mode 120000 samples/new_project_template/apiclient delete mode 100644 samples/new_project_template/app.yaml delete mode 120000 samples/new_project_template/gflags.py delete mode 120000 samples/new_project_template/gflags_validators.py delete mode 120000 samples/new_project_template/httplib2 delete mode 100644 samples/new_project_template/index.yaml delete mode 100755 samples/new_project_template/main.py delete mode 120000 samples/new_project_template/oauth2 delete mode 120000 samples/new_project_template/oauth2client delete mode 120000 samples/new_project_template/uritemplate delete mode 100644 samples/new_project_template/welcome.html delete mode 100644 samples/oauth2/dailymotion/index.yaml delete mode 100644 samples/oauth2/django_sample/__init__.py delete mode 100644 samples/oauth2/django_sample/buzz/__init__.py delete mode 100644 samples/oauth2/django_sample/buzz/models.py delete mode 100644 samples/oauth2/django_sample/buzz/tests.py delete mode 100644 samples/oauth2/django_sample/buzz/views.py delete mode 100755 samples/oauth2/django_sample/manage.py delete mode 100644 samples/oauth2/django_sample/settings.py delete mode 100644 samples/oauth2/django_sample/static/go.png delete mode 100644 samples/oauth2/django_sample/templates/buzz/login.html delete mode 100644 samples/oauth2/django_sample/templates/buzz/welcome.html delete mode 100644 samples/oauth2/django_sample/urls.py delete mode 100644 simplejson/.__speedups.c delete mode 100644 simplejson/._decoder.py delete mode 100644 simplejson/._encoder.py delete mode 100644 simplejson/._ordered_dict.py delete mode 100644 simplejson/._scanner.py delete mode 100644 simplejson/._tool.py delete mode 100644 simplejson/__init__.py delete mode 100644 simplejson/_speedups.c delete mode 
100644 simplejson/decoder.py delete mode 100644 simplejson/encoder.py delete mode 100644 simplejson/ordered_dict.py delete mode 100644 simplejson/scanner.py delete mode 100644 simplejson/tests/.___init__.py delete mode 100644 simplejson/tests/._test_decimal.py delete mode 100644 simplejson/tests/._test_decode.py delete mode 100644 simplejson/tests/._test_default.py delete mode 100644 simplejson/tests/._test_dump.py delete mode 100644 simplejson/tests/._test_encode_basestring_ascii.py delete mode 100644 simplejson/tests/._test_float.py delete mode 100644 simplejson/tests/._test_separators.py delete mode 100644 simplejson/tests/._test_unicode.py delete mode 100644 simplejson/tests/__init__.py delete mode 100644 simplejson/tests/test_check_circular.py delete mode 100644 simplejson/tests/test_decimal.py delete mode 100644 simplejson/tests/test_decode.py delete mode 100644 simplejson/tests/test_default.py delete mode 100644 simplejson/tests/test_dump.py delete mode 100644 simplejson/tests/test_encode_basestring_ascii.py delete mode 100644 simplejson/tests/test_encode_for_html.py delete mode 100644 simplejson/tests/test_fail.py delete mode 100644 simplejson/tests/test_float.py delete mode 100644 simplejson/tests/test_indent.py delete mode 100644 simplejson/tests/test_pass1.py delete mode 100644 simplejson/tests/test_pass2.py delete mode 100644 simplejson/tests/test_pass3.py delete mode 100644 simplejson/tests/test_recursion.py delete mode 100644 simplejson/tests/test_scanstring.py delete mode 100644 simplejson/tests/test_separators.py delete mode 100644 simplejson/tests/test_speedups.py delete mode 100644 simplejson/tests/test_unicode.py delete mode 100644 simplejson/tool.py diff --git a/apiclient/contrib/buzz/__init__.py b/apiclient/contrib/buzz/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/apiclient/contrib/buzz/future.json b/apiclient/contrib/buzz/future.json deleted file mode 100644 index 05018e8..0000000 --- a/apiclient/contrib/buzz/future.json +++ /dev/null @@ -1,142 +0,0 @@ -{ - "baseUrl": "https://www.googleapis.com/", - "auth": { - "request": { - "url": "https://www.google.com/accounts/OAuthGetRequestToken", - "parameters": { - "xoauth_displayname": { - "parameterType": "query", - "required": false - }, - "domain": { - "parameterType": "query", - "required": true - }, - "scope": { - "parameterType": "query", - "required": true - } - } - }, - "authorize": { - "url": "https://www.google.com/buzz/api/auth/OAuthAuthorizeToken", - "parameters": { - "oauth_token": { - "parameterType": "query", - "required": true - }, - "iconUrl": { - "parameterType": "query", - "required": false - }, - "domain": { - "parameterType": "query", - "required": true - }, - "scope": { - "parameterType": "query", - "required": true - } - } - }, - "access": { - "url": "https://www.google.com/accounts/OAuthGetAccessToken", - "parameters": { - "domain": { - "parameterType": "query", - "required": true - }, - "scope": { - "parameterType": "query", - "required": true - } - } - } - }, - "resources": { - "activities": { - "methods": { - "delete": {}, - "extractPeopleFromSearch": {}, - "get": {}, - "insert": {}, - "list": { - "next": { - "type": "uri", - "location": ["links", "next", 0, "href"] - } - }, - "search": { - "next": { - "type": "uri", - "location": ["links", "next", 0, "href"] - } - }, - "update": {} - } - }, - "comments": { - "methods": { - "delete": {}, - "get": {}, - "insert": {}, - "list": {}, - "update": {} - } - }, - "feeds": { - "methods": { - "delete": {}, - "insert": {}, - 
"list": {}, - "update": {} - } - }, - "groups": { - "methods": { - "delete": {}, - "get": {}, - "insert": {}, - "list": { - "next": { - "type": "uri", - "location": ["links", "next", 0, "href"] - } - }, - "update": {} - } - }, - "people": { - "methods": { - "delete": {}, - "get": {}, - "liked": { - "next": { - "type": "uri", - "location": ["links", "next", 0, "href"] - } - }, - "list": {}, - "relatedToUri": {}, - "reshared": {}, - "search": {}, - "update": {} - } - }, - "photos": { - "methods": { - "insert": {} - } - }, - "related": { - "methods": { - "list": {} - } - }, - "search": { - "methods": { - "extractPeople": {} - } - } - } -} diff --git a/contrib/__init__.py b/contrib/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/contrib/buzz/__init__.py b/contrib/buzz/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/contrib_tests/__init__.py b/contrib_tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/contrib_tests/buzz/__init__.py b/contrib_tests/buzz/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/docs/dyn/adexchangebuyer.v1.accounts.html b/docs/dyn/adexchangebuyer.v1.accounts.html deleted file mode 100644 index 2467730..0000000 --- a/docs/dyn/adexchangebuyer.v1.accounts.html +++ /dev/null @@ -1,47 +0,0 @@ - - -Python: class Resource - -

-
-class Resource(__builtin__.object)
-   A class for interacting with a resource.
-
-   Methods defined here:
-
-   __init__(self)
-
-   get = method(self, **kwargs)
-      Gets one account by ID.
-
-      Args:
-        id: string, The account id (required)
-
-   list = method(self, **kwargs)
-      Retrieves the authenticated user's list of accounts.
-
-      Args:
-
-   patch = method(self, **kwargs)
-      Updates an existing account. This method supports patch semantics.
-
-      Args:
-        body: object, The request body. (required)
-        id: string, The account id (required)
-
-   update = method(self, **kwargs)
-      Updates an existing account.
-
-      Args:
-        body: object, The request body. (required)
-        id: string, The account id (required)
-
-   Data descriptors defined here:
-
-   __dict__
-      dictionary for instance variables (if defined)
-
-   __weakref__
-      list of weak references to the object (if defined)
\ No newline at end of file
diff --git a/docs/dyn/adexchangebuyer.v1.html b/docs/dyn/adexchangebuyer.v1.html
deleted file mode 100644
index 1aec0ec..0000000
--- a/docs/dyn/adexchangebuyer.v1.html
+++ /dev/null
@@ -1,28 +0,0 @@
-Python: class Resource
-
-class Resource(__builtin__.object)
-   A class for interacting with a resource.
-
-   Methods defined here:
-
-   __init__(self)
-
-   accounts = methodResource(self)
-      A collection resource.
-
-   Data descriptors defined here:
-
-   __dict__
-      dictionary for instance variables (if defined)
-
-   __weakref__
-      list of weak references to the object (if defined)
- \ No newline at end of file diff --git a/gflags.py b/gflags.py deleted file mode 100644 index 8bf2edf..0000000 --- a/gflags.py +++ /dev/null @@ -1,2712 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2007, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -# -# --- -# Author: Chad Lester -# Design and style contributions by: -# Amit Patel, Bogdan Cocosel, Daniel Dulitz, Eric Tiedemann, -# Eric Veach, Laurence Gonsalves, Matthew Springer -# Code reorganized a bit by Craig Silverstein - -"""This module is used to define and parse command line flags. - -This module defines a *distributed* flag-definition policy: rather than -an application having to define all flags in or near main(), each python -module defines flags that are useful to it. When one python module -imports another, it gains access to the other's flags. (This is -implemented by having all modules share a common, global registry object -containing all the flag information.) - -Flags are defined through the use of one of the DEFINE_xxx functions. -The specific function used determines how the flag is parsed, checked, -and optionally type-converted, when it's seen on the command line. - - -IMPLEMENTATION: DEFINE_* creates a 'Flag' object and registers it with a -'FlagValues' object (typically the global FlagValues FLAGS, defined -here). The 'FlagValues' object can scan the command line arguments and -pass flag arguments to the corresponding 'Flag' objects for -value-checking and type conversion. The converted flag values are -available as attributes of the 'FlagValues' object. - -Code can access the flag through a FlagValues object, for instance -gflags.FLAGS.myflag. Typically, the __main__ module passes the -command line arguments to gflags.FLAGS for parsing. - -At bottom, this module calls getopt(), so getopt functionality is -supported, including short- and long-style flags, and the use of -- to -terminate flags. - -Methods defined by the flag module will throw 'FlagsError' exceptions. -The exception argument will be a human-readable string. 
- - -FLAG TYPES: This is a list of the DEFINE_*'s that you can do. All flags -take a name, default value, help-string, and optional 'short' name -(one-letter name). Some flags have other arguments, which are described -with the flag. - -DEFINE_string: takes any input, and interprets it as a string. - -DEFINE_bool or -DEFINE_boolean: typically does not take an argument: say --myflag to - set FLAGS.myflag to true, or --nomyflag to set - FLAGS.myflag to false. Alternately, you can say - --myflag=true or --myflag=t or --myflag=1 or - --myflag=false or --myflag=f or --myflag=0 - -DEFINE_float: takes an input and interprets it as a floating point - number. Takes optional args lower_bound and upper_bound; - if the number specified on the command line is out of - range, it will raise a FlagError. - -DEFINE_integer: takes an input and interprets it as an integer. Takes - optional args lower_bound and upper_bound as for floats. - -DEFINE_enum: takes a list of strings which represents legal values. If - the command-line value is not in this list, raise a flag - error. Otherwise, assign to FLAGS.flag as a string. - -DEFINE_list: Takes a comma-separated list of strings on the commandline. - Stores them in a python list object. - -DEFINE_spaceseplist: Takes a space-separated list of strings on the - commandline. Stores them in a python list object. - Example: --myspacesepflag "foo bar baz" - -DEFINE_multistring: The same as DEFINE_string, except the flag can be - specified more than once on the commandline. The - result is a python list object (list of strings), - even if the flag is only on the command line once. - -DEFINE_multi_int: The same as DEFINE_integer, except the flag can be - specified more than once on the commandline. The - result is a python list object (list of ints), even if - the flag is only on the command line once. - - -SPECIAL FLAGS: There are a few flags that have special meaning: - --help prints a list of all the flags in a human-readable fashion - --helpshort prints a list of all key flags (see below). - --helpxml prints a list of all flags, in XML format. DO NOT parse - the output of --help and --helpshort. Instead, parse - the output of --helpxml. For more info, see - "OUTPUT FOR --helpxml" below. - --flagfile=foo read flags from file foo. - --undefok=f1,f2 ignore unrecognized option errors for f1,f2. - For boolean flags, you should use --undefok=boolflag, and - --boolflag and --noboolflag will be accepted. Do not use - --undefok=noboolflag. - -- as in getopt(), terminates flag-processing - - -FLAGS VALIDATORS: If your program: - - requires flag X to be specified - - needs flag Y to match a regular expression - - or requires any more general constraint to be satisfied -then validators are for you! - -Each validator represents a constraint over one flag, which is enforced -starting from the initial parsing of the flags and until the program -terminates. - -Also, lower_bound and upper_bound for numerical flags are enforced using flag -validators. - -Howto: -If you want to enforce a constraint over one flag, use - -flags.RegisterValidator(flag_name, - checker, - message='Flag validation failed', - flag_values=FLAGS) - -After flag values are initially parsed, and after any change to the specified -flag, method checker(flag_value) will be executed. If constraint is not -satisfied, an IllegalFlagValue exception will be raised. See -RegisterValidator's docstring for a detailed explanation on how to construct -your own checker. 
- - -EXAMPLE USAGE: - -FLAGS = flags.FLAGS - -flags.DEFINE_integer('my_version', 0, 'Version number.') -flags.DEFINE_string('filename', None, 'Input file name', short_name='f') - -flags.RegisterValidator('my_version', - lambda value: value % 2 == 0, - message='--my_version must be divisible by 2') -flags.MarkFlagAsRequired('filename') - - -NOTE ON --flagfile: - -Flags may be loaded from text files in addition to being specified on -the commandline. - -Any flags you don't feel like typing, throw them in a file, one flag per -line, for instance: - --myflag=myvalue - --nomyboolean_flag -You then specify your file with the special flag '--flagfile=somefile'. -You CAN recursively nest flagfile= tokens OR use multiple files on the -command line. Lines beginning with a single hash '#' or a double slash -'//' are comments in your flagfile. - -Any flagfile= will be interpreted as having a relative path from -the current working directory rather than from the place the file was -included from: - myPythonScript.py --flagfile=config/somefile.cfg - -If somefile.cfg includes further --flagfile= directives, these will be -referenced relative to the original CWD, not from the directory the -including flagfile was found in! - -The caveat applies to people who are including a series of nested files -in a different dir than they are executing out of. Relative path names -are always from CWD, not from the directory of the parent include -flagfile. We do now support '~' expanded directory names. - -Absolute path names ALWAYS work! - - -EXAMPLE USAGE: - - import gflags - FLAGS = gflags.FLAGS - - # Flag names are globally defined! So in general, we need to be - # careful to pick names that are unlikely to be used by other libraries. - # If there is a conflict, we'll get an error at import time. - gflags.DEFINE_string('name', 'Mr. President', 'your name') - gflags.DEFINE_integer('age', None, 'your age in years', lower_bound=0) - gflags.DEFINE_boolean('debug', False, 'produces debugging output') - gflags.DEFINE_enum('gender', 'male', ['male', 'female'], 'your gender') - - def main(argv): - try: - argv = FLAGS(argv) # parse flags - except gflags.FlagsError, e: - print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS) - sys.exit(1) - if FLAGS.debug: print 'non-flag arguments:', argv - print 'Happy Birthday', FLAGS.name - if FLAGS.age is not None: - print 'You are a %s, who is %d years old' % (FLAGS.gender, FLAGS.age) - - if __name__ == '__main__': - main(sys.argv) - - -KEY FLAGS: - -As we already explained, each module gains access to all flags defined -by all the other modules it transitively imports. In the case of -non-trivial scripts, this means a lot of flags ... For documentation -purposes, it is good to identify the flags that are key (i.e., really -important) to a module. Clearly, the concept of "key flag" is a -subjective one. When trying to determine whether a flag is key to a -module or not, assume that you are trying to explain your module to a -potential user: which flags would you really like to mention first? - -We'll describe shortly how to declare which flags are key to a module. -For the moment, assume we know the set of key flags for each module. -Then, if you use the app.py module, you can use the --helpshort flag to -print only the help for the flags that are key to the main module, in a -human-readable format. - -NOTE: If you need to parse the flag help, do NOT use the output of ---help / --helpshort. That output is meant for human consumption, and -may be changed in the future. 
Instead, use --helpxml; flags that are -key for the main module are marked there with a yes element. - -The set of key flags for a module M is composed of: - -1. Flags defined by module M by calling a DEFINE_* function. - -2. Flags that module M explictly declares as key by using the function - - DECLARE_key_flag() - -3. Key flags of other modules that M specifies by using the function - - ADOPT_module_key_flags() - - This is a "bulk" declaration of key flags: each flag that is key for - becomes key for the current module too. - -Notice that if you do not use the functions described at points 2 and 3 -above, then --helpshort prints information only about the flags defined -by the main module of our script. In many cases, this behavior is good -enough. But if you move part of the main module code (together with the -related flags) into a different module, then it is nice to use -DECLARE_key_flag / ADOPT_module_key_flags and make sure --helpshort -lists all relevant flags (otherwise, your code refactoring may confuse -your users). - -Note: each of DECLARE_key_flag / ADOPT_module_key_flags has its own -pluses and minuses: DECLARE_key_flag is more targeted and may lead a -more focused --helpshort documentation. ADOPT_module_key_flags is good -for cases when an entire module is considered key to the current script. -Also, it does not require updates to client scripts when a new flag is -added to the module. - - -EXAMPLE USAGE 2 (WITH KEY FLAGS): - -Consider an application that contains the following three files (two -auxiliary modules and a main module): - -File libfoo.py: - - import gflags - - gflags.DEFINE_integer('num_replicas', 3, 'Number of replicas to start') - gflags.DEFINE_boolean('rpc2', True, 'Turn on the usage of RPC2.') - - ... some code ... - -File libbar.py: - - import gflags - - gflags.DEFINE_string('bar_gfs_path', '/gfs/path', - 'Path to the GFS files for libbar.') - gflags.DEFINE_string('email_for_bar_errors', 'bar-team@google.com', - 'Email address for bug reports about module libbar.') - gflags.DEFINE_boolean('bar_risky_hack', False, - 'Turn on an experimental and buggy optimization.') - - ... some code ... - -File myscript.py: - - import gflags - import libfoo - import libbar - - gflags.DEFINE_integer('num_iterations', 0, 'Number of iterations.') - - # Declare that all flags that are key for libfoo are - # key for this module too. - gflags.ADOPT_module_key_flags(libfoo) - - # Declare that the flag --bar_gfs_path (defined in libbar) is key - # for this module. - gflags.DECLARE_key_flag('bar_gfs_path') - - ... some code ... - -When myscript is invoked with the flag --helpshort, the resulted help -message lists information about all the key flags for myscript: ---num_iterations, --num_replicas, --rpc2, and --bar_gfs_path (in -addition to the special flags --help and --helpshort). - -Of course, myscript uses all the flags declared by it (in this case, -just --num_replicas) or by any of the modules it transitively imports -(e.g., the modules libfoo, libbar). E.g., it can access the value of -FLAGS.bar_risky_hack, even if --bar_risky_hack is not declared as a key -flag for myscript. - - -OUTPUT FOR --helpxml: - -The --helpxml flag generates output with the following structure: - - - - PROGRAM_BASENAME - MAIN_MODULE_DOCSTRING - ( - [yes] - DECLARING_MODULE - FLAG_NAME - FLAG_HELP_MESSAGE - DEFAULT_FLAG_VALUE - CURRENT_FLAG_VALUE - FLAG_TYPE - [OPTIONAL_ELEMENTS] - )* - - -Notes: - -1. The output is intentionally similar to the output generated by the -C++ command-line flag library. 
The few differences are due to the -Python flags that do not have a C++ equivalent (at least not yet), -e.g., DEFINE_list. - -2. New XML elements may be added in the future. - -3. DEFAULT_FLAG_VALUE is in serialized form, i.e., the string you can -pass for this flag on the command-line. E.g., for a flag defined -using DEFINE_list, this field may be foo,bar, not ['foo', 'bar']. - -4. CURRENT_FLAG_VALUE is produced using str(). This means that the -string 'false' will be represented in the same way as the boolean -False. Using repr() would have removed this ambiguity and simplified -parsing, but would have broken the compatibility with the C++ -command-line flags. - -5. OPTIONAL_ELEMENTS describe elements relevant for certain kinds of -flags: lower_bound, upper_bound (for flags that specify bounds), -enum_value (for enum flags), list_separator (for flags that consist of -a list of values, separated by a special token). - -6. We do not provide any example here: please use --helpxml instead. -""" - -import cgi -import getopt -import os -import re -import string -import sys - -import gflags_validators - -# Are we running at least python 2.2? -try: - if tuple(sys.version_info[:3]) < (2,2,0): - raise NotImplementedError("requires python 2.2.0 or later") -except AttributeError: # a very old python, that lacks sys.version_info - raise NotImplementedError("requires python 2.2.0 or later") - -# If we're not running at least python 2.2.1, define True, False, and bool. -# Thanks, Guido, for the code. -try: - True, False, bool -except NameError: - False = 0 - True = 1 - def bool(x): - if x: - return True - else: - return False - -# Are we running under pychecker? -_RUNNING_PYCHECKER = 'pychecker.python' in sys.modules - - -def _GetCallingModule(): - """Returns the name of the module that's calling into this module. - - We generally use this function to get the name of the module calling a - DEFINE_foo... function. - """ - # Walk down the stack to find the first globals dict that's not ours. - for depth in range(1, sys.getrecursionlimit()): - if not sys._getframe(depth).f_globals is globals(): - globals_for_frame = sys._getframe(depth).f_globals - module_name = _GetModuleObjectAndName(globals_for_frame)[1] - if module_name is not None: - return module_name - raise AssertionError("No module was found") - - -def _GetThisModuleObjectAndName(): - """Returns: (module object, module name) for this module.""" - return _GetModuleObjectAndName(globals()) - - -# module exceptions: -class FlagsError(Exception): - """The base class for all flags errors.""" - pass - - -class DuplicateFlag(FlagsError): - """Raised if there is a flag naming conflict.""" - pass - - -class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): - """Special case of DuplicateFlag -- SWIG flag value can't be set to None. - - This can be raised when a duplicate flag is created. Even if allow_override is - True, we still abort if the new value is None, because it's currently - impossible to pass None default value back to SWIG. See FlagValues.SetDefault - for details. - """ - pass - - -# A DuplicateFlagError conveys more information than a -# DuplicateFlag. Since there are external modules that create -# DuplicateFlags, the interface to DuplicateFlag shouldn't change. -class DuplicateFlagError(DuplicateFlag): - - def __init__(self, flagname, flag_values): - self.flagname = flagname - message = "The flag '%s' is defined twice." 
% self.flagname - flags_by_module = flag_values.FlagsByModuleDict() - for module in flags_by_module: - for flag in flags_by_module[module]: - if flag.name == flagname or flag.short_name == flagname: - message = message + " First from " + module + "," - break - message = message + " Second from " + _GetCallingModule() - DuplicateFlag.__init__(self, message) - - -class IllegalFlagValue(FlagsError): - """The flag command line argument is illegal.""" - pass - - -class UnrecognizedFlag(FlagsError): - """Raised if a flag is unrecognized.""" - pass - - -# An UnrecognizedFlagError conveys more information than an UnrecognizedFlag. -# Since there are external modules that create DuplicateFlags, the interface to -# DuplicateFlag shouldn't change. The flagvalue will be assigned the full value -# of the flag and its argument, if any, allowing handling of unrecognzed flags -# in an exception handler. -# If flagvalue is the empty string, then this exception is an due to a -# reference to a flag that was not already defined. -class UnrecognizedFlagError(UnrecognizedFlag): - def __init__(self, flagname, flagvalue=''): - self.flagname = flagname - self.flagvalue = flagvalue - UnrecognizedFlag.__init__( - self, "Unknown command line flag '%s'" % flagname) - -# Global variable used by expvar -_exported_flags = {} -_help_width = 80 # width of help output - - -def GetHelpWidth(): - """Returns: an integer, the width of help lines that is used in TextWrap.""" - return _help_width - - -def CutCommonSpacePrefix(text): - """Removes a common space prefix from the lines of a multiline text. - - If the first line does not start with a space, it is left as it is and - only in the remaining lines a common space prefix is being searched - for. That means the first line will stay untouched. This is especially - useful to turn doc strings into help texts. This is because some - people prefer to have the doc comment start already after the - apostrophy and then align the following lines while others have the - apostrophies on a seperately line. - - The function also drops trailing empty lines and ignores empty lines - following the initial content line while calculating the initial - common whitespace. - - Args: - text: text to work on - - Returns: - the resulting text - """ - text_lines = text.splitlines() - # Drop trailing empty lines - while text_lines and not text_lines[-1]: - text_lines = text_lines[:-1] - if text_lines: - # We got some content, is the first line starting with a space? - if text_lines[0] and text_lines[0][0].isspace(): - text_first_line = [] - else: - text_first_line = [text_lines.pop(0)] - # Calculate length of common leading whitesppace (only over content lines) - common_prefix = os.path.commonprefix([line for line in text_lines if line]) - space_prefix_len = len(common_prefix) - len(common_prefix.lstrip()) - # If we have a common space prefix, drop it from all lines - if space_prefix_len: - for index in xrange(len(text_lines)): - if text_lines[index]: - text_lines[index] = text_lines[index][space_prefix_len:] - return '\n'.join(text_first_line + text_lines) - return '' - - -def TextWrap(text, length=None, indent='', firstline_indent=None, tabs=' '): - """Wraps a given text to a maximum line length and returns it. - - We turn lines that only contain whitespaces into empty lines. We keep - new lines and tabs (e.g., we do not treat tabs as spaces). 
- - Args: - text: text to wrap - length: maximum length of a line, includes indentation - if this is None then use GetHelpWidth() - indent: indent for all but first line - firstline_indent: indent for first line; if None, fall back to indent - tabs: replacement for tabs - - Returns: - wrapped text - - Raises: - FlagsError: if indent not shorter than length - FlagsError: if firstline_indent not shorter than length - """ - # Get defaults where callee used None - if length is None: - length = GetHelpWidth() - if indent is None: - indent = '' - if len(indent) >= length: - raise FlagsError('Indent must be shorter than length') - # In line we will be holding the current line which is to be started - # with indent (or firstline_indent if available) and then appended - # with words. - if firstline_indent is None: - firstline_indent = '' - line = indent - else: - line = firstline_indent - if len(firstline_indent) >= length: - raise FlagsError('First iline indent must be shorter than length') - - # If the callee does not care about tabs we simply convert them to - # spaces If callee wanted tabs to be single space then we do that - # already here. - if not tabs or tabs == ' ': - text = text.replace('\t', ' ') - else: - tabs_are_whitespace = not tabs.strip() - - line_regex = re.compile('([ ]*)(\t*)([^ \t]+)', re.MULTILINE) - - # Split the text into lines and the lines with the regex above. The - # resulting lines are collected in result[]. For each split we get the - # spaces, the tabs and the next non white space (e.g. next word). - result = [] - for text_line in text.splitlines(): - # Store result length so we can find out whether processing the next - # line gave any new content - old_result_len = len(result) - # Process next line with line_regex. For optimization we do an rstrip(). - # - process tabs (changes either line or word, see below) - # - process word (first try to squeeze on line, then wrap or force wrap) - # Spaces found on the line are ignored, they get added while wrapping as - # needed. - for spaces, current_tabs, word in line_regex.findall(text_line.rstrip()): - # If tabs weren't converted to spaces, handle them now - if current_tabs: - # If the last thing we added was a space anyway then drop - # it. But let's not get rid of the indentation. - if (((result and line != indent) or - (not result and line != firstline_indent)) and line[-1] == ' '): - line = line[:-1] - # Add the tabs, if that means adding whitespace, just add it at - # the line, the rstrip() code while shorten the line down if - # necessary - if tabs_are_whitespace: - line += tabs * len(current_tabs) - else: - # if not all tab replacement is whitespace we prepend it to the word - word = tabs * len(current_tabs) + word - # Handle the case where word cannot be squeezed onto current last line - if len(line) + len(word) > length and len(indent) + len(word) <= length: - result.append(line.rstrip()) - line = indent + word - word = '' - # No space left on line or can we append a space? - if len(line) + 1 >= length: - result.append(line.rstrip()) - line = indent - else: - line += ' ' - # Add word and shorten it up to allowed line length. Restart next - # line with indent and repeat, or add a space if we're done (word - # finished) This deals with words that caanot fit on one line - # (e.g. indent + word longer than allowed line length). 
- while len(line) + len(word) >= length: - line += word - result.append(line[:length]) - word = line[length:] - line = indent - # Default case, simply append the word and a space - if word: - line += word + ' ' - # End of input line. If we have content we finish the line. If the - # current line is just the indent but we had content in during this - # original line then we need to add an emoty line. - if (result and line != indent) or (not result and line != firstline_indent): - result.append(line.rstrip()) - elif len(result) == old_result_len: - result.append('') - line = indent - - return '\n'.join(result) - - -def DocToHelp(doc): - """Takes a __doc__ string and reformats it as help.""" - - # Get rid of starting and ending white space. Using lstrip() or even - # strip() could drop more than maximum of first line and right space - # of last line. - doc = doc.strip() - - # Get rid of all empty lines - whitespace_only_line = re.compile('^[ \t]+$', re.M) - doc = whitespace_only_line.sub('', doc) - - # Cut out common space at line beginnings - doc = CutCommonSpacePrefix(doc) - - # Just like this module's comment, comments tend to be aligned somehow. - # In other words they all start with the same amount of white space - # 1) keep double new lines - # 2) keep ws after new lines if not empty line - # 3) all other new lines shall be changed to a space - # Solution: Match new lines between non white space and replace with space. - doc = re.sub('(?<=\S)\n(?=\S)', ' ', doc, re.M) - - return doc - - -def _GetModuleObjectAndName(globals_dict): - """Returns the module that defines a global environment, and its name. - - Args: - globals_dict: A dictionary that should correspond to an environment - providing the values of the globals. - - Returns: - A pair consisting of (1) module object and (2) module name (a - string). Returns (None, None) if the module could not be - identified. - """ - # The use of .items() (instead of .iteritems()) is NOT a mistake: if - # a parallel thread imports a module while we iterate over - # .iteritems() (not nice, but possible), we get a RuntimeError ... - # Hence, we use the slightly slower but safer .items(). - for name, module in sys.modules.items(): - if getattr(module, '__dict__', None) is globals_dict: - if name == '__main__': - # Pick a more informative name for the main module. - name = sys.argv[0] - return (module, name) - return (None, None) - - -def _GetMainModule(): - """Returns the name of the module from which execution started.""" - for depth in range(1, sys.getrecursionlimit()): - try: - globals_of_main = sys._getframe(depth).f_globals - except ValueError: - return _GetModuleObjectAndName(globals_of_main)[1] - raise AssertionError("No module was found") - - -class FlagValues: - """Registry of 'Flag' objects. - - A 'FlagValues' can then scan command line arguments, passing flag - arguments through to the 'Flag' objects that it owns. It also - provides easy access to the flag values. Typically only one - 'FlagValues' object is needed by an application: gflags.FLAGS - - This class is heavily overloaded: - - 'Flag' objects are registered via __setitem__: - FLAGS['longname'] = x # register a new flag - - The .value attribute of the registered 'Flag' objects can be accessed - as attributes of this 'FlagValues' object, through __getattr__. 
Both - the long and short name of the original 'Flag' objects can be used to - access its value: - FLAGS.longname # parsed flag value - FLAGS.x # parsed flag value (short name) - - Command line arguments are scanned and passed to the registered 'Flag' - objects through the __call__ method. Unparsed arguments, including - argv[0] (e.g. the program name) are returned. - argv = FLAGS(sys.argv) # scan command line arguments - - The original registered Flag objects can be retrieved through the use - of the dictionary-like operator, __getitem__: - x = FLAGS['longname'] # access the registered Flag object - - The str() operator of a 'FlagValues' object provides help for all of - the registered 'Flag' objects. - """ - - def __init__(self): - # Since everything in this class is so heavily overloaded, the only - # way of defining and using fields is to access __dict__ directly. - - # Dictionary: flag name (string) -> Flag object. - self.__dict__['__flags'] = {} - # Dictionary: module name (string) -> list of Flag objects that are defined - # by that module. - self.__dict__['__flags_by_module'] = {} - # Dictionary: module name (string) -> list of Flag objects that are - # key for that module. - self.__dict__['__key_flags_by_module'] = {} - - # Set if we should use new style gnu_getopt rather than getopt when parsing - # the args. Only possible with Python 2.3+ - self.UseGnuGetOpt(False) - - def UseGnuGetOpt(self, use_gnu_getopt=True): - self.__dict__['__use_gnu_getopt'] = use_gnu_getopt - - def IsGnuGetOpt(self): - return self.__dict__['__use_gnu_getopt'] - - def FlagDict(self): - return self.__dict__['__flags'] - - def FlagsByModuleDict(self): - """Returns the dictionary of module_name -> list of defined flags. - - Returns: - A dictionary. Its keys are module names (strings). Its values - are lists of Flag objects. - """ - return self.__dict__['__flags_by_module'] - - def KeyFlagsByModuleDict(self): - """Returns the dictionary of module_name -> list of key flags. - - Returns: - A dictionary. Its keys are module names (strings). Its values - are lists of Flag objects. - """ - return self.__dict__['__key_flags_by_module'] - - def _RegisterFlagByModule(self, module_name, flag): - """Records the module that defines a specific flag. - - We keep track of which flag is defined by which module so that we - can later sort the flags by module. - - Args: - module_name: A string, the name of a Python module. - flag: A Flag object, a flag that is key to the module. - """ - flags_by_module = self.FlagsByModuleDict() - flags_by_module.setdefault(module_name, []).append(flag) - - def _RegisterKeyFlagForModule(self, module_name, flag): - """Specifies that a flag is a key flag for a module. - - Args: - module_name: A string, the name of a Python module. - flag: A Flag object, a flag that is key to the module. - """ - key_flags_by_module = self.KeyFlagsByModuleDict() - # The list of key flags for the module named module_name. - key_flags = key_flags_by_module.setdefault(module_name, []) - # Add flag, but avoid duplicates. - if flag not in key_flags: - key_flags.append(flag) - - def _GetFlagsDefinedByModule(self, module): - """Returns the list of flags defined by a module. - - Args: - module: A module object or a module name (a string). - - Returns: - A new list of Flag objects. Caller may update this list as he - wishes: none of those changes will affect the internals of this - FlagValue object. 
- """ - if not isinstance(module, str): - module = module.__name__ - - return list(self.FlagsByModuleDict().get(module, [])) - - def _GetKeyFlagsForModule(self, module): - """Returns the list of key flags for a module. - - Args: - module: A module object or a module name (a string) - - Returns: - A new list of Flag objects. Caller may update this list as he - wishes: none of those changes will affect the internals of this - FlagValue object. - """ - if not isinstance(module, str): - module = module.__name__ - - # Any flag is a key flag for the module that defined it. NOTE: - # key_flags is a fresh list: we can update it without affecting the - # internals of this FlagValues object. - key_flags = self._GetFlagsDefinedByModule(module) - - # Take into account flags explicitly declared as key for a module. - for flag in self.KeyFlagsByModuleDict().get(module, []): - if flag not in key_flags: - key_flags.append(flag) - return key_flags - - def AppendFlagValues(self, flag_values): - """Appends flags registered in another FlagValues instance. - - Args: - flag_values: registry to copy from - """ - for flag_name, flag in flag_values.FlagDict().iteritems(): - # Each flags with shortname appears here twice (once under its - # normal name, and again with its short name). To prevent - # problems (DuplicateFlagError) with double flag registration, we - # perform a check to make sure that the entry we're looking at is - # for its normal name. - if flag_name == flag.name: - self[flag_name] = flag - - def RemoveFlagValues(self, flag_values): - """Remove flags that were previously appended from another FlagValues. - - Args: - flag_values: registry containing flags to remove. - """ - for flag_name in flag_values.FlagDict(): - self.__delattr__(flag_name) - - def __setitem__(self, name, flag): - """Registers a new flag variable.""" - fl = self.FlagDict() - if not isinstance(flag, Flag): - raise IllegalFlagValue(flag) - if not isinstance(name, type("")): - raise FlagsError("Flag name must be a string") - if len(name) == 0: - raise FlagsError("Flag name cannot be empty") - # If running under pychecker, duplicate keys are likely to be - # defined. Disable check for duplicate keys when pycheck'ing. - if (fl.has_key(name) and not flag.allow_override and - not fl[name].allow_override and not _RUNNING_PYCHECKER): - raise DuplicateFlagError(name, self) - short_name = flag.short_name - if short_name is not None: - if (fl.has_key(short_name) and not flag.allow_override and - not fl[short_name].allow_override and not _RUNNING_PYCHECKER): - raise DuplicateFlagError(short_name, self) - fl[short_name] = flag - fl[name] = flag - global _exported_flags - _exported_flags[name] = flag - - def __getitem__(self, name): - """Retrieves the Flag object for the flag --name.""" - return self.FlagDict()[name] - - def __getattr__(self, name): - """Retrieves the 'value' attribute of the flag --name.""" - fl = self.FlagDict() - if not fl.has_key(name): - raise AttributeError(name) - return fl[name].value - - def __setattr__(self, name, value): - """Sets the 'value' attribute of the flag --name.""" - fl = self.FlagDict() - fl[name].value = value - self._AssertValidators(fl[name].validators) - return value - - def _AssertAllValidators(self): - all_validators = set() - for flag in self.FlagDict().itervalues(): - for validator in flag.validators: - all_validators.add(validator) - self._AssertValidators(all_validators) - - def _AssertValidators(self, validators): - """Assert if all validators in the list are satisfied. 
- - Asserts validators in the order they were created. - Args: - validators: Iterable(gflags_validators.Validator), validators to be - verified - Raises: - AttributeError: if validators work with a non-existing flag. - IllegalFlagValue: if validation fails for at least one validator - """ - for validator in sorted( - validators, key=lambda validator: validator.insertion_index): - try: - validator.Verify(self) - except gflags_validators.Error, e: - message = validator.PrintFlagsWithValues(self) - raise IllegalFlagValue('%s: %s' % (message, str(e))) - - def _FlagIsRegistered(self, flag_obj): - """Checks whether a Flag object is registered under some name. - - Note: this is non trivial: in addition to its normal name, a flag - may have a short name too. In self.FlagDict(), both the normal and - the short name are mapped to the same flag object. E.g., calling - only "del FLAGS.short_name" is not unregistering the corresponding - Flag object (it is still registered under the longer name). - - Args: - flag_obj: A Flag object. - - Returns: - A boolean: True iff flag_obj is registered under some name. - """ - flag_dict = self.FlagDict() - # Check whether flag_obj is registered under its long name. - name = flag_obj.name - if flag_dict.get(name, None) == flag_obj: - return True - # Check whether flag_obj is registered under its short name. - short_name = flag_obj.short_name - if (short_name is not None and - flag_dict.get(short_name, None) == flag_obj): - return True - # The flag cannot be registered under any other name, so we do not - # need to do a full search through the values of self.FlagDict(). - return False - - def __delattr__(self, flag_name): - """Deletes a previously-defined flag from a flag object. - - This method makes sure we can delete a flag by using - - del flag_values_object. - - E.g., - - flags.DEFINE_integer('foo', 1, 'Integer flag.') - del flags.FLAGS.foo - - Args: - flag_name: A string, the name of the flag to be deleted. - - Raises: - AttributeError: When there is no registered flag named flag_name. - """ - fl = self.FlagDict() - if flag_name not in fl: - raise AttributeError(flag_name) - - flag_obj = fl[flag_name] - del fl[flag_name] - - if not self._FlagIsRegistered(flag_obj): - # If the Flag object indicated by flag_name is no longer - # registered (please see the docstring of _FlagIsRegistered), then - # we delete the occurences of the flag object in all our internal - # dictionaries. - self.__RemoveFlagFromDictByModule(self.FlagsByModuleDict(), flag_obj) - self.__RemoveFlagFromDictByModule(self.KeyFlagsByModuleDict(), flag_obj) - - def __RemoveFlagFromDictByModule(self, flags_by_module_dict, flag_obj): - """Removes a flag object from a module -> list of flags dictionary. - - Args: - flags_by_module_dict: A dictionary that maps module names to lists of - flags. - flag_obj: A flag object. - """ - for unused_module, flags_in_module in flags_by_module_dict.iteritems(): - # while (as opposed to if) takes care of multiple occurences of a - # flag in the list for the same module. 
- while flag_obj in flags_in_module: - flags_in_module.remove(flag_obj) - - def SetDefault(self, name, value): - """Changes the default value of the named flag object.""" - fl = self.FlagDict() - if not fl.has_key(name): - raise AttributeError(name) - fl[name].SetDefault(value) - self._AssertValidators(fl[name].validators) - - def __contains__(self, name): - """Returns True if name is a value (flag) in the dict.""" - return name in self.FlagDict() - - has_key = __contains__ # a synonym for __contains__() - - def __iter__(self): - return self.FlagDict().iterkeys() - - def __call__(self, argv): - """Parses flags from argv; stores parsed flags into this FlagValues object. - - All unparsed arguments are returned. Flags are parsed using the GNU - Program Argument Syntax Conventions, using getopt: - - http://www.gnu.org/software/libc/manual/html_mono/libc.html#Getopt - - Args: - argv: argument list. Can be of any type that may be converted to a list. - - Returns: - The list of arguments not parsed as options, including argv[0] - - Raises: - FlagsError: on any parsing error - """ - # Support any sequence type that can be converted to a list - argv = list(argv) - - shortopts = "" - longopts = [] - - fl = self.FlagDict() - - # This pre parses the argv list for --flagfile=<> options. - argv = argv[:1] + self.ReadFlagsFromFiles(argv[1:], force_gnu=False) - - # Correct the argv to support the google style of passing boolean - # parameters. Boolean parameters may be passed by using --mybool, - # --nomybool, --mybool=(true|false|1|0). getopt does not support - # having options that may or may not have a parameter. We replace - # instances of the short form --mybool and --nomybool with their - # full forms: --mybool=(true|false). - original_argv = list(argv) # list() makes a copy - shortest_matches = None - for name, flag in fl.items(): - if not flag.boolean: - continue - if shortest_matches is None: - # Determine the smallest allowable prefix for all flag names - shortest_matches = self.ShortestUniquePrefixes(fl) - no_name = 'no' + name - prefix = shortest_matches[name] - no_prefix = shortest_matches[no_name] - - # Replace all occurences of this boolean with extended forms - for arg_idx in range(1, len(argv)): - arg = argv[arg_idx] - if arg.find('=') >= 0: continue - if arg.startswith('--'+prefix) and ('--'+name).startswith(arg): - argv[arg_idx] = ('--%s=true' % name) - elif arg.startswith('--'+no_prefix) and ('--'+no_name).startswith(arg): - argv[arg_idx] = ('--%s=false' % name) - - # Loop over all of the flags, building up the lists of short options - # and long options that will be passed to getopt. Short options are - # specified as a string of letters, each letter followed by a colon - # if it takes an argument. Long options are stored in an array of - # strings. Each string ends with an '=' if it takes an argument. - for name, flag in fl.items(): - longopts.append(name + "=") - if len(name) == 1: # one-letter option: allow short flag type also - shortopts += name - if not flag.boolean: - shortopts += ":" - - longopts.append('undefok=') - undefok_flags = [] - - # In case --undefok is specified, loop to pick up unrecognized - # options one by one. 
- unrecognized_opts = [] - args = argv[1:] - while True: - try: - if self.__dict__['__use_gnu_getopt']: - optlist, unparsed_args = getopt.gnu_getopt(args, shortopts, longopts) - else: - optlist, unparsed_args = getopt.getopt(args, shortopts, longopts) - break - except getopt.GetoptError, e: - if not e.opt or e.opt in fl: - # Not an unrecognized option, reraise the exception as a FlagsError - raise FlagsError(e) - # Remove offender from args and try again - for arg_index in range(len(args)): - if ((args[arg_index] == '--' + e.opt) or - (args[arg_index] == '-' + e.opt) or - (args[arg_index].startswith('--' + e.opt + '='))): - unrecognized_opts.append((e.opt, args[arg_index])) - args = args[0:arg_index] + args[arg_index+1:] - break - else: - # We should have found the option, so we don't expect to get - # here. We could assert, but raising the original exception - # might work better. - raise FlagsError(e) - - for name, arg in optlist: - if name == '--undefok': - flag_names = arg.split(',') - undefok_flags.extend(flag_names) - # For boolean flags, if --undefok=boolflag is specified, then we should - # also accept --noboolflag, in addition to --boolflag. - # Since we don't know the type of the undefok'd flag, this will affect - # non-boolean flags as well. - # NOTE: You shouldn't use --undefok=noboolflag, because then we will - # accept --nonoboolflag here. We are choosing not to do the conversion - # from noboolflag -> boolflag because of the ambiguity that flag names - # can start with 'no'. - undefok_flags.extend('no' + name for name in flag_names) - continue - if name.startswith('--'): - # long option - name = name[2:] - short_option = 0 - else: - # short option - name = name[1:] - short_option = 1 - if fl.has_key(name): - flag = fl[name] - if flag.boolean and short_option: arg = 1 - flag.Parse(arg) - - # If there were unrecognized options, raise an exception unless - # the options were named via --undefok. - for opt, value in unrecognized_opts: - if opt not in undefok_flags: - raise UnrecognizedFlagError(opt, value) - - if unparsed_args: - if self.__dict__['__use_gnu_getopt']: - # if using gnu_getopt just return the program name + remainder of argv. - ret_val = argv[:1] + unparsed_args - else: - # unparsed_args becomes the first non-flag detected by getopt to - # the end of argv. Because argv may have been modified above, - # return original_argv for this region. - ret_val = argv[:1] + original_argv[-len(unparsed_args):] - else: - ret_val = argv[:1] - - self._AssertAllValidators() - return ret_val - - def Reset(self): - """Resets the values to the point before FLAGS(argv) was called.""" - for f in self.FlagDict().values(): - f.Unparse() - - def RegisteredFlags(self): - """Returns: a list of the names and short names of all registered flags.""" - return self.FlagDict().keys() - - def FlagValuesDict(self): - """Returns: a dictionary that maps flag names to flag values.""" - flag_values = {} - - for flag_name in self.RegisteredFlags(): - flag = self.FlagDict()[flag_name] - flag_values[flag_name] = flag.value - - return flag_values - - def __str__(self): - """Generates a help string for all known flags.""" - return self.GetHelp() - - def GetHelp(self, prefix=''): - """Generates a help string for all known flags.""" - helplist = [] - - flags_by_module = self.FlagsByModuleDict() - if flags_by_module: - - modules = flags_by_module.keys() - modules.sort() - - # Print the help for the main module first, if possible. 
- main_module = _GetMainModule() - if main_module in modules: - modules.remove(main_module) - modules = [main_module] + modules - - for module in modules: - self.__RenderOurModuleFlags(module, helplist) - - self.__RenderModuleFlags('gflags', - _SPECIAL_FLAGS.FlagDict().values(), - helplist) - - else: - # Just print one long list of flags. - self.__RenderFlagList( - self.FlagDict().values() + _SPECIAL_FLAGS.FlagDict().values(), - helplist, prefix) - - return '\n'.join(helplist) - - def __RenderModuleFlags(self, module, flags, output_lines, prefix=""): - """Generates a help string for a given module.""" - if not isinstance(module, str): - module = module.__name__ - output_lines.append('\n%s%s:' % (prefix, module)) - self.__RenderFlagList(flags, output_lines, prefix + " ") - - def __RenderOurModuleFlags(self, module, output_lines, prefix=""): - """Generates a help string for a given module.""" - flags = self._GetFlagsDefinedByModule(module) - if flags: - self.__RenderModuleFlags(module, flags, output_lines, prefix) - - def __RenderOurModuleKeyFlags(self, module, output_lines, prefix=""): - """Generates a help string for the key flags of a given module. - - Args: - module: A module object or a module name (a string). - output_lines: A list of strings. The generated help message - lines will be appended to this list. - prefix: A string that is prepended to each generated help line. - """ - key_flags = self._GetKeyFlagsForModule(module) - if key_flags: - self.__RenderModuleFlags(module, key_flags, output_lines, prefix) - - def ModuleHelp(self, module): - """Describe the key flags of a module. - - Args: - module: A module object or a module name (a string). - - Returns: - string describing the key flags of a module. - """ - helplist = [] - self.__RenderOurModuleKeyFlags(module, helplist) - return '\n'.join(helplist) - - def MainModuleHelp(self): - """Describe the key flags of the main module. - - Returns: - string describing the key flags of a module. - """ - return self.ModuleHelp(_GetMainModule()) - - def __RenderFlagList(self, flaglist, output_lines, prefix=" "): - fl = self.FlagDict() - special_fl = _SPECIAL_FLAGS.FlagDict() - flaglist = [(flag.name, flag) for flag in flaglist] - flaglist.sort() - flagset = {} - for (name, flag) in flaglist: - # It's possible this flag got deleted or overridden since being - # registered in the per-module flaglist. Check now against the - # canonical source of current flag information, the FlagDict. - if fl.get(name, None) != flag and special_fl.get(name, None) != flag: - # a different flag is using this name now - continue - # only print help once - if flagset.has_key(flag): continue - flagset[flag] = 1 - flaghelp = "" - if flag.short_name: flaghelp += "-%s," % flag.short_name - if flag.boolean: - flaghelp += "--[no]%s" % flag.name + ":" - else: - flaghelp += "--%s" % flag.name + ":" - flaghelp += " " - if flag.help: - flaghelp += flag.help - flaghelp = TextWrap(flaghelp, indent=prefix+" ", - firstline_indent=prefix) - if flag.default_as_str: - flaghelp += "\n" - flaghelp += TextWrap("(default: %s)" % flag.default_as_str, - indent=prefix+" ") - if flag.parser.syntactic_help: - flaghelp += "\n" - flaghelp += TextWrap("(%s)" % flag.parser.syntactic_help, - indent=prefix+" ") - output_lines.append(flaghelp) - - def get(self, name, default): - """Returns the value of a flag (if not None) or a default value. - - Args: - name: A string, the name of a flag. - default: Default value to use if the flag value is None. 
- """ - - value = self.__getattr__(name) - if value is not None: # Can't do if not value, b/c value might be '0' or "" - return value - else: - return default - - def ShortestUniquePrefixes(self, fl): - """Returns: dictionary; maps flag names to their shortest unique prefix.""" - # Sort the list of flag names - sorted_flags = [] - for name, flag in fl.items(): - sorted_flags.append(name) - if flag.boolean: - sorted_flags.append('no%s' % name) - sorted_flags.sort() - - # For each name in the sorted list, determine the shortest unique - # prefix by comparing itself to the next name and to the previous - # name (the latter check uses cached info from the previous loop). - shortest_matches = {} - prev_idx = 0 - for flag_idx in range(len(sorted_flags)): - curr = sorted_flags[flag_idx] - if flag_idx == (len(sorted_flags) - 1): - next = None - else: - next = sorted_flags[flag_idx+1] - next_len = len(next) - for curr_idx in range(len(curr)): - if (next is None - or curr_idx >= next_len - or curr[curr_idx] != next[curr_idx]): - # curr longer than next or no more chars in common - shortest_matches[curr] = curr[:max(prev_idx, curr_idx) + 1] - prev_idx = curr_idx - break - else: - # curr shorter than (or equal to) next - shortest_matches[curr] = curr - prev_idx = curr_idx + 1 # next will need at least one more char - return shortest_matches - - def __IsFlagFileDirective(self, flag_string): - """Checks whether flag_string contain a --flagfile= directive.""" - if isinstance(flag_string, type("")): - if flag_string.startswith('--flagfile='): - return 1 - elif flag_string == '--flagfile': - return 1 - elif flag_string.startswith('-flagfile='): - return 1 - elif flag_string == '-flagfile': - return 1 - else: - return 0 - return 0 - - def ExtractFilename(self, flagfile_str): - """Returns filename from a flagfile_str of form -[-]flagfile=filename. - - The cases of --flagfile foo and -flagfile foo shouldn't be hitting - this function, as they are dealt with in the level above this - function. - """ - if flagfile_str.startswith('--flagfile='): - return os.path.expanduser((flagfile_str[(len('--flagfile=')):]).strip()) - elif flagfile_str.startswith('-flagfile='): - return os.path.expanduser((flagfile_str[(len('-flagfile=')):]).strip()) - else: - raise FlagsError('Hit illegal --flagfile type: %s' % flagfile_str) - - def __GetFlagFileLines(self, filename, parsed_file_list): - """Returns the useful (!=comments, etc) lines from a file with flags. - - Args: - filename: A string, the name of the flag file. - parsed_file_list: A list of the names of the files we have - already read. MUTATED BY THIS FUNCTION. - - Returns: - List of strings. See the note below. - - NOTE(springer): This function checks for a nested --flagfile= - tag and handles the lower file recursively. It returns a list of - all the lines that _could_ contain command flags. This is - EVERYTHING except whitespace lines and comments (lines starting - with '#' or '//'). - """ - line_list = [] # All line from flagfile. - flag_line_list = [] # Subset of lines w/o comments, blanks, flagfile= tags. - try: - file_obj = open(filename, 'r') - except IOError, e_msg: - print e_msg - print 'ERROR:: Unable to open flagfile: %s' % (filename) - return flag_line_list - - line_list = file_obj.readlines() - file_obj.close() - parsed_file_list.append(filename) - - # This is where we check each line in the file we just read. - for line in line_list: - if line.isspace(): - pass - # Checks for comment (a line that starts with '#'). 
- elif line.startswith('#') or line.startswith('//'): - pass - # Checks for a nested "--flagfile=" flag in the current file. - # If we find one, recursively parse down into that file. - elif self.__IsFlagFileDirective(line): - sub_filename = self.ExtractFilename(line) - # We do a little safety check for reparsing a file we've already done. - if not sub_filename in parsed_file_list: - included_flags = self.__GetFlagFileLines(sub_filename, - parsed_file_list) - flag_line_list.extend(included_flags) - else: # Case of hitting a circularly included file. - print >>sys.stderr, ('Warning: Hit circular flagfile dependency: %s' - % sub_filename) - else: - # Any line that's not a comment or a nested flagfile should get - # copied into 2nd position. This leaves earlier arguements - # further back in the list, thus giving them higher priority. - flag_line_list.append(line.strip()) - return flag_line_list - - def ReadFlagsFromFiles(self, argv, force_gnu=True): - """Processes command line args, but also allow args to be read from file. - Args: - argv: A list of strings, usually sys.argv[1:], which may contain one or - more flagfile directives of the form --flagfile="./filename". - Note that the name of the program (sys.argv[0]) should be omitted. - force_gnu: If False, --flagfile parsing obeys normal flag semantics. - If True, --flagfile parsing instead follows gnu_getopt semantics. - *** WARNING *** force_gnu=False may become the future default! - - Returns: - - A new list which has the original list combined with what we read - from any flagfile(s). - - References: Global gflags.FLAG class instance. - - This function should be called before the normal FLAGS(argv) call. - This function scans the input list for a flag that looks like: - --flagfile=. Then it opens , reads all valid key - and value pairs and inserts them into the input list between the - first item of the list and any subsequent items in the list. - - Note that your application's flags are still defined the usual way - using gflags DEFINE_flag() type functions. - - Notes (assuming we're getting a commandline of some sort as our input): - --> Flags from the command line argv _should_ always take precedence! - --> A further "--flagfile=" CAN be nested in a flagfile. - It will be processed after the parent flag file is done. - --> For duplicate flags, first one we hit should "win". - --> In a flagfile, a line beginning with # or // is a comment. - --> Entirely blank lines _should_ be ignored. - """ - parsed_file_list = [] - rest_of_args = argv - new_argv = [] - while rest_of_args: - current_arg = rest_of_args[0] - rest_of_args = rest_of_args[1:] - if self.__IsFlagFileDirective(current_arg): - # This handles the case of -(-)flagfile foo. In this case the - # next arg really is part of this one. - if current_arg == '--flagfile' or current_arg == '-flagfile': - if not rest_of_args: - raise IllegalFlagValue('--flagfile with no argument') - flag_filename = os.path.expanduser(rest_of_args[0]) - rest_of_args = rest_of_args[1:] - else: - # This handles the case of (-)-flagfile=foo. - flag_filename = self.ExtractFilename(current_arg) - new_argv[0:0] = self.__GetFlagFileLines(flag_filename, parsed_file_list) - else: - new_argv.append(current_arg) - # Stop parsing after '--', like getopt and gnu_getopt. - if current_arg == '--': - break - # Stop parsing after a non-flag, like getopt. 
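A small hypothetical sketch of the flagfile mechanism handled above; the file name, flag name, and values are invented. Comment lines ('#' or '//'), blank lines, and nested --flagfile= directives are treated as the docstrings describe.

    import gflags

    FLAGS = gflags.FLAGS
    gflags.DEFINE_string('greeting', 'hello', 'Greeting to use.')  # illustrative flag

    # Write a tiny flagfile: one flag argument per line, comments skipped.
    with open('example.flags', 'w') as f:
      f.write('# local defaults\n--greeting=bonjour\n')

    # The --flagfile= directive is expanded into argv before normal parsing.
    FLAGS(['prog', '--flagfile=example.flags'])
    print FLAGS.greeting   # bonjour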
- if not current_arg.startswith('-'): - if not force_gnu and not self.__dict__['__use_gnu_getopt']: - break - - if rest_of_args: - new_argv.extend(rest_of_args) - - return new_argv - - def FlagsIntoString(self): - """Returns a string with the flags assignments from this FlagValues object. - - This function ignores flags whose value is None. Each flag - assignment is separated by a newline. - - NOTE: MUST mirror the behavior of the C++ function - CommandlineFlagsIntoString from google3/base/commandlineflags.cc. - """ - s = '' - for flag in self.FlagDict().values(): - if flag.value is not None: - s += flag.Serialize() + '\n' - return s - - def AppendFlagsIntoFile(self, filename): - """Appends all flags assignments from this FlagInfo object to a file. - - Output will be in the format of a flagfile. - - NOTE: MUST mirror the behavior of the C++ version of - AppendFlagsIntoFile from google3/base/commandlineflags.cc. - """ - out_file = open(filename, 'a') - out_file.write(self.FlagsIntoString()) - out_file.close() - - def WriteHelpInXMLFormat(self, outfile=None): - """Outputs flag documentation in XML format. - - NOTE: We use element names that are consistent with those used by - the C++ command-line flag library, from - google3/base/commandlineflags_reporting.cc. We also use a few new - elements (e.g., ), but we do not interfere / overlap with - existing XML elements used by the C++ library. Please maintain this - consistency. - - Args: - outfile: File object we write to. Default None means sys.stdout. - """ - outfile = outfile or sys.stdout - - outfile.write('\n') - outfile.write('\n') - indent = ' ' - _WriteSimpleXMLElement(outfile, 'program', os.path.basename(sys.argv[0]), - indent) - - usage_doc = sys.modules['__main__'].__doc__ - if not usage_doc: - usage_doc = '\nUSAGE: %s [flags]\n' % sys.argv[0] - else: - usage_doc = usage_doc.replace('%s', sys.argv[0]) - _WriteSimpleXMLElement(outfile, 'usage', usage_doc, indent) - - # Get list of key flags for the main module. - key_flags = self._GetKeyFlagsForModule(_GetMainModule()) - - # Sort flags by declaring module name and next by flag name. - flags_by_module = self.FlagsByModuleDict() - all_module_names = list(flags_by_module.keys()) - all_module_names.sort() - for module_name in all_module_names: - flag_list = [(f.name, f) for f in flags_by_module[module_name]] - flag_list.sort() - for unused_flag_name, flag in flag_list: - is_key = flag in key_flags - flag.WriteInfoInXMLFormat(outfile, module_name, - is_key=is_key, indent=indent) - - outfile.write('\n') - outfile.flush() - - def AddValidator(self, validator): - """Register new flags validator to be checked. - - Args: - validator: gflags_validators.Validator - Raises: - AttributeError: if validators work with a non-existing flag. - """ - for flag_name in validator.GetFlagsNames(): - flag = self.FlagDict()[flag_name] - flag.validators.append(validator) - -# end of FlagValues definition - - -# The global FlagValues instance -FLAGS = FlagValues() - - -def _MakeXMLSafe(s): - """Escapes <, >, and & from s, and removes XML 1.0-illegal chars.""" - s = cgi.escape(s) # Escape <, >, and & - # Remove characters that cannot appear in an XML 1.0 document - # (http://www.w3.org/TR/REC-xml/#charsets). - # - # NOTE: if there are problems with current solution, one may move to - # XML 1.1, which allows such chars, if they're entity-escaped (&#xHH;). - s = re.sub(r'[\x00-\x08\x0b\x0c\x0e-\x1f]', '', s) - return s - - -def _WriteSimpleXMLElement(outfile, name, value, indent): - """Writes a simple XML element. 
-
-  Args:
-    outfile: File object we write the XML element to.
-    name: A string, the name of XML element.
-    value: A Python object, whose string representation will be used
-      as the value of the XML element.
-    indent: A string, prepended to each line of generated output.
-  """
-  value_str = str(value)
-  if isinstance(value, bool):
-    # Display boolean values as the C++ flag library does: no caps.
-    value_str = value_str.lower()
-  outfile.write('%s<%s>%s</%s>\n' %
-                (indent, name, _MakeXMLSafe(value_str), name))
-
-
-class Flag:
-  """Information about a command-line flag.
-
-  'Flag' objects define the following fields:
-    .name - the name for this flag
-    .default - the default value for this flag
-    .default_as_str - default value as repr'd string, e.g., "'true'" (or None)
-    .value - the most recent parsed value of this flag; set by Parse()
-    .help - a help string or None if no help is available
-    .short_name - the single letter alias for this flag (or None)
-    .boolean - if 'true', this flag does not accept arguments
-    .present - true if this flag was parsed from command line flags.
-    .parser - an ArgumentParser object
-    .serializer - an ArgumentSerializer object
-    .allow_override - the flag may be redefined without raising an error
-
-  The only public method of a 'Flag' object is Parse(), but it is
-  typically only called by a 'FlagValues' object. The Parse() method is
-  a thin wrapper around the 'ArgumentParser' Parse() method. The parsed
-  value is saved in .value, and the .present attribute is updated. If
-  this flag was already present, a FlagsError is raised.
-
-  Parse() is also called during __init__ to parse the default value and
-  initialize the .value attribute. This enables other python modules to
-  safely use flags even if the __main__ module neglects to parse the
-  command line arguments. The .present attribute is cleared after
-  __init__ parsing. If the default value is set to None, then the
-  __init__ parsing step is skipped and the .value attribute is
-  initialized to None.
-
-  Note: The default value is also presented to the user in the help
-  string, so it is important that it be a legal value for this flag.
- """ - - def __init__(self, parser, serializer, name, default, help_string, - short_name=None, boolean=0, allow_override=0): - self.name = name - - if not help_string: - help_string = '(no help available)' - - self.help = help_string - self.short_name = short_name - self.boolean = boolean - self.present = 0 - self.parser = parser - self.serializer = serializer - self.allow_override = allow_override - self.value = None - self.validators = [] - - self.SetDefault(default) - - def __GetParsedValueAsString(self, value): - if value is None: - return None - if self.serializer: - return repr(self.serializer.Serialize(value)) - if self.boolean: - if value: - return repr('true') - else: - return repr('false') - return repr(str(value)) - - def Parse(self, argument): - try: - self.value = self.parser.Parse(argument) - except ValueError, e: # recast ValueError as IllegalFlagValue - raise IllegalFlagValue("flag --%s=%s: %s" % (self.name, argument, e)) - self.present += 1 - - def Unparse(self): - if self.default is None: - self.value = None - else: - self.Parse(self.default) - self.present = 0 - - def Serialize(self): - if self.value is None: - return '' - if self.boolean: - if self.value: - return "--%s" % self.name - else: - return "--no%s" % self.name - else: - if not self.serializer: - raise FlagsError("Serializer not present for flag %s" % self.name) - return "--%s=%s" % (self.name, self.serializer.Serialize(self.value)) - - def SetDefault(self, value): - """Changes the default value (and current value too) for this Flag.""" - # We can't allow a None override because it may end up not being - # passed to C++ code when we're overriding C++ flags. So we - # cowardly bail out until someone fixes the semantics of trying to - # pass None to a C++ flag. See swig_flags.Init() for details on - # this behavior. - # TODO(olexiy): Users can directly call this method, bypassing all flags - # validators (we don't have FlagValues here, so we can not check - # validators). - # The simplest solution I see is to make this method private. - # Another approach would be to store reference to the corresponding - # FlagValues with each flag, but this seems to be an overkill. - if value is None and self.allow_override: - raise DuplicateFlagCannotPropagateNoneToSwig(self.name) - - self.default = value - self.Unparse() - self.default_as_str = self.__GetParsedValueAsString(self.value) - - def Type(self): - """Returns: a string that describes the type of this Flag.""" - # NOTE: we use strings, and not the types.*Type constants because - # our flags can have more exotic types, e.g., 'comma separated list - # of strings', 'whitespace separated list of strings', etc. - return self.parser.Type() - - def WriteInfoInXMLFormat(self, outfile, module_name, is_key=False, indent=''): - """Writes common info about this flag, in XML format. - - This is information that is relevant to all flags (e.g., name, - meaning, etc.). If you defined a flag that has some other pieces of - info, then please override _WriteCustomInfoInXMLFormat. - - Please do NOT override this method. - - Args: - outfile: File object we write to. - module_name: A string, the name of the module that defines this flag. - is_key: A boolean, True iff this flag is key for main module. - indent: A string that is prepended to each generated line. 
- """ - outfile.write(indent + '\n') - inner_indent = indent + ' ' - if is_key: - _WriteSimpleXMLElement(outfile, 'key', 'yes', inner_indent) - _WriteSimpleXMLElement(outfile, 'file', module_name, inner_indent) - # Print flag features that are relevant for all flags. - _WriteSimpleXMLElement(outfile, 'name', self.name, inner_indent) - if self.short_name: - _WriteSimpleXMLElement(outfile, 'short_name', self.short_name, - inner_indent) - if self.help: - _WriteSimpleXMLElement(outfile, 'meaning', self.help, inner_indent) - # The default flag value can either be represented as a string like on the - # command line, or as a Python object. We serialize this value in the - # latter case in order to remain consistent. - if self.serializer and not isinstance(self.default, str): - default_serialized = self.serializer.Serialize(self.default) - else: - default_serialized = self.default - _WriteSimpleXMLElement(outfile, 'default', default_serialized, inner_indent) - _WriteSimpleXMLElement(outfile, 'current', self.value, inner_indent) - _WriteSimpleXMLElement(outfile, 'type', self.Type(), inner_indent) - # Print extra flag features this flag may have. - self._WriteCustomInfoInXMLFormat(outfile, inner_indent) - outfile.write(indent + '\n') - - def _WriteCustomInfoInXMLFormat(self, outfile, indent): - """Writes extra info about this flag, in XML format. - - "Extra" means "not already printed by WriteInfoInXMLFormat above." - - Args: - outfile: File object we write to. - indent: A string that is prepended to each generated line. - """ - # Usually, the parser knows the extra details about the flag, so - # we just forward the call to it. - self.parser.WriteCustomInfoInXMLFormat(outfile, indent) -# End of Flag definition - - -class _ArgumentParserCache(type): - """Metaclass used to cache and share argument parsers among flags.""" - - _instances = {} - - def __call__(mcs, *args, **kwargs): - """Returns an instance of the argument parser cls. - - This method overrides behavior of the __new__ methods in - all subclasses of ArgumentParser (inclusive). If an instance - for mcs with the same set of arguments exists, this instance is - returned, otherwise a new instance is created. - - If any keyword arguments are defined, or the values in args - are not hashable, this method always returns a new instance of - cls. - - Args: - args: Positional initializer arguments. - kwargs: Initializer keyword arguments. - - Returns: - An instance of cls, shared or new. - """ - if kwargs: - return type.__call__(mcs, *args, **kwargs) - else: - instances = mcs._instances - key = (mcs,) + tuple(args) - try: - return instances[key] - except KeyError: - # No cache entry for key exists, create a new one. - return instances.setdefault(key, type.__call__(mcs, *args)) - except TypeError: - # An object in args cannot be hashed, always return - # a new instance. - return type.__call__(mcs, *args) - - -class ArgumentParser(object): - """Base class used to parse and convert arguments. - - The Parse() method checks to make sure that the string argument is a - legal value and convert it to a native type. If the value cannot be - converted, it should throw a 'ValueError' exception with a human - readable explanation of why the value is illegal. - - Subclasses should also define a syntactic_help string which may be - presented to the user to describe the form of the legal values. - - Argument parser classes must be stateless, since instances are cached - and shared between flags. 
Initializer arguments are allowed, but all - member variables must be derived from initializer arguments only. - """ - __metaclass__ = _ArgumentParserCache - - syntactic_help = "" - - def Parse(self, argument): - """Default implementation: always returns its argument unmodified.""" - return argument - - def Type(self): - return 'string' - - def WriteCustomInfoInXMLFormat(self, outfile, indent): - pass - - -class ArgumentSerializer: - """Base class for generating string representations of a flag value.""" - - def Serialize(self, value): - return str(value) - - -class ListSerializer(ArgumentSerializer): - - def __init__(self, list_sep): - self.list_sep = list_sep - - def Serialize(self, value): - return self.list_sep.join([str(x) for x in value]) - - -# Flags validators - - -def RegisterValidator(flag_name, - checker, - message='Flag validation failed', - flag_values=FLAGS): - """Adds a constraint, which will be enforced during program execution. - - The constraint is validated when flags are initially parsed, and after each - change of the corresponding flag's value. - Args: - flag_name: string, name of the flag to be checked. - checker: method to validate the flag. - input - value of the corresponding flag (string, boolean, etc. - This value will be passed to checker by the library). See file's - docstring for examples. - output - Boolean. - Must return True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise gflags_validators.Error(desired_error_message). - message: error text to be shown to the user if checker returns False. - If checker raises gflags_validators.Error, message from the raised - Error will be shown. - flag_values: FlagValues - Raises: - AttributeError: if flag_name is not registered as a valid flag name. - """ - flag_values.AddValidator(gflags_validators.SimpleValidator(flag_name, - checker, - message)) - - -def MarkFlagAsRequired(flag_name, flag_values=FLAGS): - """Ensure that flag is not None during program execution. - - Registers a flag validator, which will follow usual validator - rules. - Args: - flag_name: string, name of the flag - flag_values: FlagValues - Raises: - AttributeError: if flag_name is not registered as a valid flag name. - """ - RegisterValidator(flag_name, - lambda value: value is not None, - message='Flag --%s must be specified.' % flag_name, - flag_values=flag_values) - - -def _RegisterBoundsValidatorIfNeeded(parser, name, flag_values): - """Enforce lower and upper bounds for numeric flags. - - Args: - parser: NumericParser (either FloatParser or IntegerParser). Provides lower - and upper bounds, and help text to display. - name: string, name of the flag - flag_values: FlagValues - """ - if parser.lower_bound is not None or parser.upper_bound is not None: - - def Checker(value): - if value is not None and parser.IsOutsideBounds(value): - message = '%s is not %s' % (value, parser.syntactic_help) - raise gflags_validators.Error(message) - return True - - RegisterValidator(name, - Checker, - flag_values=flag_values) - - -# The DEFINE functions are explained in mode details in the module doc string. - - -def DEFINE(parser, name, default, help, flag_values=FLAGS, serializer=None, - **args): - """Registers a generic Flag object. - - NOTE: in the docstrings of all DEFINE* functions, "registers" is short - for "creates a new flag and registers it". - - Auxiliary function: clients should use the specialized DEFINE_ - function instead. 
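A brief hypothetical sketch of the validator API registered above (RegisterValidator and MarkFlagAsRequired); the flag name and the bound are invented for illustration:

    import gflags

    FLAGS = gflags.FLAGS
    gflags.DEFINE_integer('workers', 4, 'Number of worker threads.')  # illustrative flag

    # The checker receives the parsed flag value and returns True when the
    # constraint holds; it runs when FLAGS(argv) parses and on later changes.
    gflags.RegisterValidator('workers',
                             lambda value: value is None or value <= 64,
                             message='--workers must be at most 64.')
    gflags.MarkFlagAsRequired('workers')

    FLAGS(['prog', '--workers=8'])   # satisfies both validators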
- - Args: - parser: ArgumentParser that is used to parse the flag arguments. - name: A string, the flag name. - default: The default value of the flag. - help: A help string. - flag_values: FlagValues object the flag will be registered with. - serializer: ArgumentSerializer that serializes the flag value. - args: Dictionary with extra keyword args that are passes to the - Flag __init__. - """ - DEFINE_flag(Flag(parser, serializer, name, default, help, **args), - flag_values) - - -def DEFINE_flag(flag, flag_values=FLAGS): - """Registers a 'Flag' object with a 'FlagValues' object. - - By default, the global FLAGS 'FlagValue' object is used. - - Typical users will use one of the more specialized DEFINE_xxx - functions, such as DEFINE_string or DEFINE_integer. But developers - who need to create Flag objects themselves should use this function - to register their flags. - """ - # copying the reference to flag_values prevents pychecker warnings - fv = flag_values - fv[flag.name] = flag - # Tell flag_values who's defining the flag. - if isinstance(flag_values, FlagValues): - # Regarding the above isinstance test: some users pass funny - # values of flag_values (e.g., {}) in order to avoid the flag - # registration (in the past, there used to be a flag_values == - # FLAGS test here) and redefine flags with the same name (e.g., - # debug). To avoid breaking their code, we perform the - # registration only if flag_values is a real FlagValues object. - flag_values._RegisterFlagByModule(_GetCallingModule(), flag) - - -def _InternalDeclareKeyFlags(flag_names, - flag_values=FLAGS, key_flag_values=None): - """Declares a flag as key for the calling module. - - Internal function. User code should call DECLARE_key_flag or - ADOPT_module_key_flags instead. - - Args: - flag_names: A list of strings that are names of already-registered - Flag objects. - flag_values: A FlagValues object that the flags listed in - flag_names have registered with (the value of the flag_values - argument from the DEFINE_* calls that defined those flags). - This should almost never need to be overridden. - key_flag_values: A FlagValues object that (among possibly many - other things) keeps track of the key flags for each module. - Default None means "same as flag_values". This should almost - never need to be overridden. - - Raises: - UnrecognizedFlagError: when we refer to a flag that was not - defined yet. - """ - key_flag_values = key_flag_values or flag_values - - module = _GetCallingModule() - - for flag_name in flag_names: - if flag_name not in flag_values: - raise UnrecognizedFlagError(flag_name) - flag = flag_values.FlagDict()[flag_name] - key_flag_values._RegisterKeyFlagForModule(module, flag) - - -def DECLARE_key_flag(flag_name, flag_values=FLAGS): - """Declares one flag as key to the current module. - - Key flags are flags that are deemed really important for a module. - They are important when listing help messages; e.g., if the - --helpshort command-line flag is used, then only the key flags of the - main module are listed (instead of all flags, as in the case of - --help). - - Sample usage: - - flags.DECLARED_key_flag('flag_1') - - Args: - flag_name: A string, the name of an already declared flag. - (Redeclaring flags as key, including flags implicitly key - because they were declared in this module, is a no-op.) - flag_values: A FlagValues object. This should almost never - need to be overridden. - """ - if flag_name in _SPECIAL_FLAGS: - # Take care of the special flags, e.g., --flagfile, --undefok. 
- # These flags are defined in _SPECIAL_FLAGS, and are treated - # specially during flag parsing, taking precedence over the - # user-defined flags. - _InternalDeclareKeyFlags([flag_name], - flag_values=_SPECIAL_FLAGS, - key_flag_values=flag_values) - return - _InternalDeclareKeyFlags([flag_name], flag_values=flag_values) - - -def ADOPT_module_key_flags(module, flag_values=FLAGS): - """Declares that all flags key to a module are key to the current module. - - Args: - module: A module object. - flag_values: A FlagValues object. This should almost never need - to be overridden. - - Raises: - FlagsError: When given an argument that is a module name (a - string), instead of a module object. - """ - # NOTE(salcianu): an even better test would be if not - # isinstance(module, types.ModuleType) but I didn't want to import - # types for such a tiny use. - if isinstance(module, str): - raise FlagsError('Received module name %s; expected a module object.' - % module) - _InternalDeclareKeyFlags( - [f.name for f in flag_values._GetKeyFlagsForModule(module.__name__)], - flag_values=flag_values) - # If module is this flag module, take _SPECIAL_FLAGS into account. - if module == _GetThisModuleObjectAndName()[0]: - _InternalDeclareKeyFlags( - # As we associate flags with _GetCallingModule(), the special - # flags defined in this module are incorrectly registered with - # a different module. So, we can't use _GetKeyFlagsForModule. - # Instead, we take all flags from _SPECIAL_FLAGS (a private - # FlagValues, where no other module should register flags). - [f.name for f in _SPECIAL_FLAGS.FlagDict().values()], - flag_values=_SPECIAL_FLAGS, - key_flag_values=flag_values) - - -# -# STRING FLAGS -# - - -def DEFINE_string(name, default, help, flag_values=FLAGS, **args): - """Registers a flag whose value can be any string.""" - parser = ArgumentParser() - serializer = ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) - - -# -# BOOLEAN FLAGS -# -# and the special HELP flags. - -class BooleanParser(ArgumentParser): - """Parser of boolean values.""" - - def Convert(self, argument): - """Converts the argument to a boolean; raise ValueError on errors.""" - if type(argument) == str: - if argument.lower() in ['true', 't', '1']: - return True - elif argument.lower() in ['false', 'f', '0']: - return False - - bool_argument = bool(argument) - if argument == bool_argument: - # The argument is a valid boolean (True, False, 0, or 1), and not just - # something that always converts to bool (list, string, int, etc.). - return bool_argument - - raise ValueError('Non-boolean argument to boolean flag', argument) - - def Parse(self, argument): - val = self.Convert(argument) - return val - - def Type(self): - return 'bool' - - -class BooleanFlag(Flag): - """Basic boolean flag. - - Boolean flags do not take any arguments, and their value is either - True (1) or False (0). The false value is specified on the command - line by prepending the word 'no' to either the long or the short flag - name. - - For example, if a Boolean flag was created whose long name was - 'update' and whose short name was 'x', then this flag could be - explicitly unset through either --noupdate or --nox. 
- """ - - def __init__(self, name, default, help, short_name=None, **args): - p = BooleanParser() - Flag.__init__(self, p, None, name, default, help, short_name, 1, **args) - if not self.help: self.help = "a boolean value" - - -def DEFINE_boolean(name, default, help, flag_values=FLAGS, **args): - """Registers a boolean flag. - - Such a boolean flag does not take an argument. If a user wants to - specify a false value explicitly, the long option beginning with 'no' - must be used: i.e. --noflag - - This flag will have a value of None, True or False. None is possible - if default=None and the user does not specify the flag on the command - line. - """ - DEFINE_flag(BooleanFlag(name, default, help, **args), flag_values) - -# Match C++ API to unconfuse C++ people. -DEFINE_bool = DEFINE_boolean - -class HelpFlag(BooleanFlag): - """ - HelpFlag is a special boolean flag that prints usage information and - raises a SystemExit exception if it is ever found in the command - line arguments. Note this is called with allow_override=1, so other - apps can define their own --help flag, replacing this one, if they want. - """ - def __init__(self): - BooleanFlag.__init__(self, "help", 0, "show this help", - short_name="?", allow_override=1) - def Parse(self, arg): - if arg: - doc = sys.modules["__main__"].__doc__ - flags = str(FLAGS) - print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0]) - if flags: - print "flags:" - print flags - sys.exit(1) - - -class HelpXMLFlag(BooleanFlag): - """Similar to HelpFlag, but generates output in XML format.""" - - def __init__(self): - BooleanFlag.__init__(self, 'helpxml', False, - 'like --help, but generates XML output', - allow_override=1) - - def Parse(self, arg): - if arg: - FLAGS.WriteHelpInXMLFormat(sys.stdout) - sys.exit(1) - - -class HelpshortFlag(BooleanFlag): - """ - HelpshortFlag is a special boolean flag that prints usage - information for the "main" module, and rasies a SystemExit exception - if it is ever found in the command line arguments. Note this is - called with allow_override=1, so other apps can define their own - --helpshort flag, replacing this one, if they want. - """ - def __init__(self): - BooleanFlag.__init__(self, "helpshort", 0, - "show usage only for this module", allow_override=1) - def Parse(self, arg): - if arg: - doc = sys.modules["__main__"].__doc__ - flags = FLAGS.MainModuleHelp() - print doc or ("\nUSAGE: %s [flags]\n" % sys.argv[0]) - if flags: - print "flags:" - print flags - sys.exit(1) - -# -# Numeric parser - base class for Integer and Float parsers -# - - -class NumericParser(ArgumentParser): - """Parser of numeric values. - - Parsed value may be bounded to a given upper and lower bound. 
- """ - - def IsOutsideBounds(self, val): - return ((self.lower_bound is not None and val < self.lower_bound) or - (self.upper_bound is not None and val > self.upper_bound)) - - def Parse(self, argument): - val = self.Convert(argument) - if self.IsOutsideBounds(val): - raise ValueError("%s is not %s" % (val, self.syntactic_help)) - return val - - def WriteCustomInfoInXMLFormat(self, outfile, indent): - if self.lower_bound is not None: - _WriteSimpleXMLElement(outfile, 'lower_bound', self.lower_bound, indent) - if self.upper_bound is not None: - _WriteSimpleXMLElement(outfile, 'upper_bound', self.upper_bound, indent) - - def Convert(self, argument): - """Default implementation: always returns its argument unmodified.""" - return argument - -# End of Numeric Parser - -# -# FLOAT FLAGS -# - -class FloatParser(NumericParser): - """Parser of floating point values. - - Parsed value may be bounded to a given upper and lower bound. - """ - number_article = "a" - number_name = "number" - syntactic_help = " ".join((number_article, number_name)) - - def __init__(self, lower_bound=None, upper_bound=None): - super(FloatParser, self).__init__() - self.lower_bound = lower_bound - self.upper_bound = upper_bound - sh = self.syntactic_help - if lower_bound is not None and upper_bound is not None: - sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound)) - elif lower_bound == 0: - sh = "a non-negative %s" % self.number_name - elif upper_bound == 0: - sh = "a non-positive %s" % self.number_name - elif upper_bound is not None: - sh = "%s <= %s" % (self.number_name, upper_bound) - elif lower_bound is not None: - sh = "%s >= %s" % (self.number_name, lower_bound) - self.syntactic_help = sh - - def Convert(self, argument): - """Converts argument to a float; raises ValueError on errors.""" - return float(argument) - - def Type(self): - return 'float' -# End of FloatParser - - -def DEFINE_float(name, default, help, lower_bound=None, upper_bound=None, - flag_values=FLAGS, **args): - """Registers a flag whose value must be a float. - - If lower_bound or upper_bound are set, then this flag must be - within the given range. - """ - parser = FloatParser(lower_bound, upper_bound) - serializer = ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) - _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values) - -# -# INTEGER FLAGS -# - - -class IntegerParser(NumericParser): - """Parser of an integer value. - - Parsed value may be bounded to a given upper and lower bound. 
- """ - number_article = "an" - number_name = "integer" - syntactic_help = " ".join((number_article, number_name)) - - def __init__(self, lower_bound=None, upper_bound=None): - super(IntegerParser, self).__init__() - self.lower_bound = lower_bound - self.upper_bound = upper_bound - sh = self.syntactic_help - if lower_bound is not None and upper_bound is not None: - sh = ("%s in the range [%s, %s]" % (sh, lower_bound, upper_bound)) - elif lower_bound == 1: - sh = "a positive %s" % self.number_name - elif upper_bound == -1: - sh = "a negative %s" % self.number_name - elif lower_bound == 0: - sh = "a non-negative %s" % self.number_name - elif upper_bound == 0: - sh = "a non-positive %s" % self.number_name - elif upper_bound is not None: - sh = "%s <= %s" % (self.number_name, upper_bound) - elif lower_bound is not None: - sh = "%s >= %s" % (self.number_name, lower_bound) - self.syntactic_help = sh - - def Convert(self, argument): - __pychecker__ = 'no-returnvalues' - if type(argument) == str: - base = 10 - if len(argument) > 2 and argument[0] == "0" and argument[1] == "x": - base = 16 - try: - return int(argument, base) - # ValueError is thrown when argument is a string, and overflows an int. - except ValueError: - return long(argument, base) - else: - try: - return int(argument) - # OverflowError is thrown when argument is numeric, and overflows an int. - except OverflowError: - return long(argument) - - def Type(self): - return 'int' - - -def DEFINE_integer(name, default, help, lower_bound=None, upper_bound=None, - flag_values=FLAGS, **args): - """Registers a flag whose value must be an integer. - - If lower_bound, or upper_bound are set, then this flag must be - within the given range. - """ - parser = IntegerParser(lower_bound, upper_bound) - serializer = ArgumentSerializer() - DEFINE(parser, name, default, help, flag_values, serializer, **args) - _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values) - - -# -# ENUM FLAGS -# - - -class EnumParser(ArgumentParser): - """Parser of a string enum value (a string value from a given set). - - If enum_values (see below) is not specified, any string is allowed. - """ - - def __init__(self, enum_values=None): - super(EnumParser, self).__init__() - self.enum_values = enum_values - - def Parse(self, argument): - if self.enum_values and argument not in self.enum_values: - raise ValueError("value should be one of <%s>" % - "|".join(self.enum_values)) - return argument - - def Type(self): - return 'string enum' - - -class EnumFlag(Flag): - """Basic enum flag; its value can be any string from list of enum_values.""" - - def __init__(self, name, default, help, enum_values=None, - short_name=None, **args): - enum_values = enum_values or [] - p = EnumParser(enum_values) - g = ArgumentSerializer() - Flag.__init__(self, p, g, name, default, help, short_name, **args) - if not self.help: self.help = "an enum string" - self.help = "<%s>: %s" % ("|".join(enum_values), self.help) - - def _WriteCustomInfoInXMLFormat(self, outfile, indent): - for enum_value in self.parser.enum_values: - _WriteSimpleXMLElement(outfile, 'enum_value', enum_value, indent) - - -def DEFINE_enum(name, default, enum_values, help, flag_values=FLAGS, - **args): - """Registers a flag whose value can be any string from enum_values.""" - DEFINE_flag(EnumFlag(name, default, help, enum_values, ** args), - flag_values) - - -# -# LIST FLAGS -# - - -class BaseListParser(ArgumentParser): - """Base class for a parser of lists of strings. 
- - To extend, inherit from this class; from the subclass __init__, call - - BaseListParser.__init__(self, token, name) - - where token is a character used to tokenize, and name is a description - of the separator. - """ - - def __init__(self, token=None, name=None): - assert name - super(BaseListParser, self).__init__() - self._token = token - self._name = name - self.syntactic_help = "a %s separated list" % self._name - - def Parse(self, argument): - if isinstance(argument, list): - return argument - elif argument == '': - return [] - else: - return [s.strip() for s in argument.split(self._token)] - - def Type(self): - return '%s separated list of strings' % self._name - - -class ListParser(BaseListParser): - """Parser for a comma-separated list of strings.""" - - def __init__(self): - BaseListParser.__init__(self, ',', 'comma') - - def WriteCustomInfoInXMLFormat(self, outfile, indent): - BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent) - _WriteSimpleXMLElement(outfile, 'list_separator', repr(','), indent) - - -class WhitespaceSeparatedListParser(BaseListParser): - """Parser for a whitespace-separated list of strings.""" - - def __init__(self): - BaseListParser.__init__(self, None, 'whitespace') - - def WriteCustomInfoInXMLFormat(self, outfile, indent): - BaseListParser.WriteCustomInfoInXMLFormat(self, outfile, indent) - separators = list(string.whitespace) - separators.sort() - for ws_char in string.whitespace: - _WriteSimpleXMLElement(outfile, 'list_separator', repr(ws_char), indent) - - -def DEFINE_list(name, default, help, flag_values=FLAGS, **args): - """Registers a flag whose value is a comma-separated list of strings.""" - parser = ListParser() - serializer = ListSerializer(',') - DEFINE(parser, name, default, help, flag_values, serializer, **args) - - -def DEFINE_spaceseplist(name, default, help, flag_values=FLAGS, **args): - """Registers a flag whose value is a whitespace-separated list of strings. - - Any whitespace can be used as a separator. - """ - parser = WhitespaceSeparatedListParser() - serializer = ListSerializer(' ') - DEFINE(parser, name, default, help, flag_values, serializer, **args) - - -# -# MULTI FLAGS -# - - -class MultiFlag(Flag): - """A flag that can appear multiple time on the command-line. - - The value of such a flag is a list that contains the individual values - from all the appearances of that flag on the command-line. - - See the __doc__ for Flag for most behavior of this class. Only - differences in behavior are described here: - - * The default value may be either a single value or a list of values. - A single value is interpreted as the [value] singleton list. - - * The value of the flag is always a list, even if the option was - only supplied once, and even if the default value is a single - value - """ - - def __init__(self, *args, **kwargs): - Flag.__init__(self, *args, **kwargs) - self.help += ';\n repeat this option to specify a list of values' - - def Parse(self, arguments): - """Parses one or more arguments with the installed parser. - - Args: - arguments: a single argument or a list of arguments (typically a - list of default values); a single argument is converted - internally into a list containing one item. - """ - if not isinstance(arguments, list): - # Default value may be a list of values. Most other arguments - # will not be, so convert them into a single-item list to make - # processing simpler below. 
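A minimal hypothetical sketch of the list and multi flags defined in this section (DEFINE_list here, DEFINE_multistring just below); the flag names and values are invented for illustration:

    import gflags

    FLAGS = gflags.FLAGS
    gflags.DEFINE_list('languages', ['en'], 'Comma-separated language codes.')
    gflags.DEFINE_multistring('tag', [], 'May be repeated on the command line.')

    FLAGS(['prog', '--languages=en,fr,de', '--tag=alpha', '--tag=beta'])
    print FLAGS.languages   # ['en', 'fr', 'de']
    print FLAGS.tag         # ['alpha', 'beta']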
- arguments = [arguments] - - if self.present: - # keep a backup reference to list of previously supplied option values - values = self.value - else: - # "erase" the defaults with an empty list - values = [] - - for item in arguments: - # have Flag superclass parse argument, overwriting self.value reference - Flag.Parse(self, item) # also increments self.present - values.append(self.value) - - # put list of option values back in the 'value' attribute - self.value = values - - def Serialize(self): - if not self.serializer: - raise FlagsError("Serializer not present for flag %s" % self.name) - if self.value is None: - return '' - - s = '' - - multi_value = self.value - - for self.value in multi_value: - if s: s += ' ' - s += Flag.Serialize(self) - - self.value = multi_value - - return s - - def Type(self): - return 'multi ' + self.parser.Type() - - -def DEFINE_multi(parser, serializer, name, default, help, flag_values=FLAGS, - **args): - """Registers a generic MultiFlag that parses its args with a given parser. - - Auxiliary function. Normal users should NOT use it directly. - - Developers who need to create their own 'Parser' classes for options - which can appear multiple times can call this module function to - register their flags. - """ - DEFINE_flag(MultiFlag(parser, serializer, name, default, help, **args), - flag_values) - - -def DEFINE_multistring(name, default, help, flag_values=FLAGS, **args): - """Registers a flag whose value can be a list of any strings. - - Use the flag on the command line multiple times to place multiple - string values into the list. The 'default' may be a single string - (which will be converted into a single-element list) or a list of - strings. - """ - parser = ArgumentParser() - serializer = ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) - - -def DEFINE_multi_int(name, default, help, lower_bound=None, upper_bound=None, - flag_values=FLAGS, **args): - """Registers a flag whose value can be a list of arbitrary integers. - - Use the flag on the command line multiple times to place multiple - integer values into the list. The 'default' may be a single integer - (which will be converted into a single-element list) or a list of - integers. - """ - parser = IntegerParser(lower_bound, upper_bound) - serializer = ArgumentSerializer() - DEFINE_multi(parser, serializer, name, default, help, flag_values, **args) - - -# Now register the flags that we want to exist in all applications. -# These are all defined with allow_override=1, so user-apps can use -# these flagnames for their own purposes, if they want. -DEFINE_flag(HelpFlag()) -DEFINE_flag(HelpshortFlag()) -DEFINE_flag(HelpXMLFlag()) - -# Define special flags here so that help may be generated for them. -# NOTE: Please do NOT use _SPECIAL_FLAGS from outside this module. -_SPECIAL_FLAGS = FlagValues() - - -DEFINE_string( - 'flagfile', "", - "Insert flag definitions from the given file into the command line.", - _SPECIAL_FLAGS) - -DEFINE_string( - 'undefok', "", - "comma-separated list of flag names that it is okay to specify " - "on the command line even if the program does not define a flag " - "with that name. IMPORTANT: flags in this list that have " - "arguments MUST use the --flag=value format.", _SPECIAL_FLAGS) diff --git a/gflags_validators.py b/gflags_validators.py deleted file mode 100755 index 92d0e23..0000000 --- a/gflags_validators.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2010, Google Inc. -# All rights reserved. 
-# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Module to enforce different constraints on flags. - -A validator represents an invariant, enforced over a one or more flags. -See 'FLAGS VALIDATORS' in flags.py's docstring for a usage manual. -""" - -__author__ = 'olexiy@google.com (Olexiy Oryeshko)' - - -class Error(Exception): - """Thrown If validator constraint is not satisfied.""" - - -class Validator(object): - """Base class for flags validators. - - Users should NOT overload these classes, and use flags.Register... - methods instead. - """ - - # Used to assign each validator an unique insertion_index - validators_count = 0 - - def __init__(self, checker, message): - """Constructor to create all validators. - - Args: - checker: function to verify the constraint. - Input of this method varies, see SimpleValidator and - DictionaryValidator for a detailed description. - message: string, error message to be shown to the user - """ - self.checker = checker - self.message = message - Validator.validators_count += 1 - # Used to assert validators in the order they were registered (CL/18694236) - self.insertion_index = Validator.validators_count - - def Verify(self, flag_values): - """Verify that constraint is satisfied. - - flags library calls this method to verify Validator's constraint. - Args: - flag_values: flags.FlagValues, containing all flags - Raises: - Error: if constraint is not satisfied. - """ - param = self._GetInputToCheckerFunction(flag_values) - if not self.checker(param): - raise Error(self.message) - - def GetFlagsNames(self): - """Return the names of the flags checked by this validator. - - Returns: - [string], names of the flags - """ - raise NotImplementedError('This method should be overloaded') - - def PrintFlagsWithValues(self, flag_values): - raise NotImplementedError('This method should be overloaded') - - def _GetInputToCheckerFunction(self, flag_values): - """Given flag values, construct the input to be given to checker. - - Args: - flag_values: flags.FlagValues, containing all flags. - Returns: - Return type depends on the specific validator. 
- """ - raise NotImplementedError('This method should be overloaded') - - -class SimpleValidator(Validator): - """Validator behind RegisterValidator() method. - - Validates that a single flag passes its checker function. The checker function - takes the flag value and returns True (if value looks fine) or, if flag value - is not valid, either returns False or raises an Exception.""" - def __init__(self, flag_name, checker, message): - """Constructor. - - Args: - flag_name: string, name of the flag. - checker: function to verify the validator. - input - value of the corresponding flag (string, boolean, etc). - output - Boolean. Must return True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise Error. - message: string, error message to be shown to the user if validator's - condition is not satisfied - """ - super(SimpleValidator, self).__init__(checker, message) - self.flag_name = flag_name - - def GetFlagsNames(self): - return [self.flag_name] - - def PrintFlagsWithValues(self, flag_values): - return 'flag --%s=%s' % (self.flag_name, flag_values[self.flag_name].value) - - def _GetInputToCheckerFunction(self, flag_values): - """Given flag values, construct the input to be given to checker. - - Args: - flag_values: flags.FlagValues - Returns: - value of the corresponding flag. - """ - return flag_values[self.flag_name].value - - -class DictionaryValidator(Validator): - """Validator behind RegisterDictionaryValidator method. - - Validates that flag values pass their common checker function. The checker - function takes flag values and returns True (if values look fine) or, - if values are not valid, either returns False or raises an Exception. - """ - def __init__(self, flag_names, checker, message): - """Constructor. - - Args: - flag_names: [string], containing names of the flags used by checker. - checker: function to verify the validator. - input - dictionary, with keys() being flag_names, and value for each - key being the value of the corresponding flag (string, boolean, etc). - output - Boolean. Must return True if validator constraint is satisfied. - If constraint is not satisfied, it should either return False or - raise Error. - message: string, error message to be shown to the user if validator's - condition is not satisfied - """ - super(DictionaryValidator, self).__init__(checker, message) - self.flag_names = flag_names - - def _GetInputToCheckerFunction(self, flag_values): - """Given flag values, construct the input to be given to checker. - - Args: - flag_values: flags.FlagValues - Returns: - dictionary, with keys() being self.lag_names, and value for each key - being the value of the corresponding flag (string, boolean, etc). - """ - return dict([key, flag_values[key].value] for key in self.flag_names) - - def PrintFlagsWithValues(self, flag_values): - prefix = 'flags ' - flags_with_values = [] - for key in self.flag_names: - flags_with_values.append('%s=%s' % (key, flag_values[key].value)) - return prefix + ', '.join(flags_with_values) - - def GetFlagsNames(self): - return self.flag_names diff --git a/httplib2/__init__.py b/httplib2/__init__.py deleted file mode 100644 index bbf6f79..0000000 --- a/httplib2/__init__.py +++ /dev/null @@ -1,1513 +0,0 @@ -from __future__ import generators -""" -httplib2 - -A caching http interface that supports ETags and gzip -to conserve bandwidth. - -Requires Python 2.3 or later - -Changelog: -2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. 
- -""" - -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", - "James Antill", - "Xavier Verges Farrero", - "Jonathan Feinberg", - "Blair Zajac", - "Sam Ruby", - "Louis Nyffenegger"] -__license__ = "MIT" -__version__ = "0.7.0" - -import re -import sys -import email -import email.Utils -import email.Message -import email.FeedParser -import StringIO -import gzip -import zlib -import httplib -import urlparse -import base64 -import os -import copy -import calendar -import time -import random -import errno -# remove depracated warning in python2.6 -try: - from hashlib import sha1 as _sha, md5 as _md5 -except ImportError: - import sha - import md5 - _sha = sha.new - _md5 = md5.new -import hmac -from gettext import gettext as _ -import socket - -try: - from httplib2 import socks -except ImportError: - socks = None - -# Build the appropriate socket wrapper for ssl -try: - import ssl # python 2.6 - ssl_SSLError = ssl.SSLError - def _ssl_wrap_socket(sock, key_file, cert_file, - disable_validation, ca_certs): - if disable_validation: - cert_reqs = ssl.CERT_NONE - else: - cert_reqs = ssl.CERT_REQUIRED - # We should be specifying SSL version 3 or TLS v1, but the ssl module - # doesn't expose the necessary knobs. So we need to go with the default - # of SSLv23. - return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, - cert_reqs=cert_reqs, ca_certs=ca_certs) -except (AttributeError, ImportError): - ssl_SSLError = None - def _ssl_wrap_socket(sock, key_file, cert_file, - disable_validation, ca_certs): - if not disable_validation: - raise CertificateValidationUnsupported( - "SSL certificate validation is not supported without " - "the ssl module installed. To avoid this error, install " - "the ssl module, or explicity disable validation.") - ssl_sock = socket.ssl(sock, key_file, cert_file) - return httplib.FakeSocket(sock, ssl_sock) - - -if sys.version_info >= (2,3): - from iri2uri import iri2uri -else: - def iri2uri(uri): - return uri - -def has_timeout(timeout): # python 2.6 - if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'): - return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) - return (timeout is not None) - -__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', - 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', - 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', - 'debuglevel', 'ProxiesUnavailableError'] - - -# The httplib debug level, set to a non-zero value to get debug output -debuglevel = 0 - - -# Python 2.3 support -if sys.version_info < (2,4): - def sorted(seq): - seq.sort() - return seq - -# Python 2.3 support -def HTTPResponse__getheaders(self): - """Return list of (header, value) tuples.""" - if self.msg is None: - raise httplib.ResponseNotReady() - return self.msg.items() - -if not hasattr(httplib.HTTPResponse, 'getheaders'): - httplib.HTTPResponse.getheaders = HTTPResponse__getheaders - -# All exceptions raised here derive from HttpLib2Error -class HttpLib2Error(Exception): pass - -# Some exceptions can be caught and optionally -# be turned back into responses. 
-class HttpLib2ErrorWithResponse(HttpLib2Error): - def __init__(self, desc, response, content): - self.response = response - self.content = content - HttpLib2Error.__init__(self, desc) - -class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass -class RedirectLimit(HttpLib2ErrorWithResponse): pass -class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass -class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass -class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass - -class MalformedHeader(HttpLib2Error): pass -class RelativeURIError(HttpLib2Error): pass -class ServerNotFoundError(HttpLib2Error): pass -class ProxiesUnavailableError(HttpLib2Error): pass -class CertificateValidationUnsupported(HttpLib2Error): pass -class SSLHandshakeError(HttpLib2Error): pass -class NotSupportedOnThisPlatform(HttpLib2Error): pass -class CertificateHostnameMismatch(SSLHandshakeError): - def __init__(self, desc, host, cert): - HttpLib2Error.__init__(self, desc) - self.host = host - self.cert = cert - -# Open Items: -# ----------- -# Proxy support - -# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) - -# Pluggable cache storage (supports storing the cache in -# flat files by default. We need a plug-in architecture -# that can support Berkeley DB and Squid) - -# == Known Issues == -# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. -# Does not handle Cache-Control: max-stale -# Does not use Age: headers when calculating cache freshness. - - -# The number of redirections to follow before giving up. -# Note that only GET redirects are automatically followed. -# Will also honor 301 requests by saving that info and never -# requesting that URI again. -DEFAULT_MAX_REDIRECTS = 5 - -# Default CA certificates file bundled with httplib2. -CA_CERTS = os.path.join( - os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt") - -# Which headers are hop-by-hop headers by default -HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] - -def _get_end2end_headers(response): - hopbyhop = list(HOP_BY_HOP) - hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) - return [header for header in response.keys() if header not in hopbyhop] - -URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") - -def parse_uri(uri): - """Parses a URI using the regex given in Appendix B of RFC 3986. - - (scheme, authority, path, query, fragment) = parse_uri(uri) - """ - groups = URI.match(uri).groups() - return (groups[1], groups[3], groups[4], groups[6], groups[8]) - -def urlnorm(uri): - (scheme, authority, path, query, fragment) = parse_uri(uri) - if not scheme or not authority: - raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) - authority = authority.lower() - scheme = scheme.lower() - if not path: - path = "/" - # Could do syntax based normalization of the URI before - # computing the digest. See Section 6.2.2 of Std 66. - request_uri = query and "?".join([path, query]) or path - scheme = scheme.lower() - defrag_uri = scheme + "://" + authority + request_uri - return scheme, authority, request_uri, defrag_uri - - -# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) -re_url_scheme = re.compile(r'^\w+://') -re_slash = re.compile(r'[?/:|]+') - -def safename(filename): - """Return a filename suitable for the cache. 
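safename() continues below; it builds on the URI helpers above to derive cache file names. A short sketch of that plumbing, assuming the pre-patch httplib2 package is importable; the URL is illustrative and the printed results follow from the definitions above:

import httplib2

# parse_uri() splits per the RFC 3986 Appendix B regex.
print(httplib2.parse_uri('http://Example.COM/a/b?x=1#frag'))
# -> ('http', 'Example.COM', '/a/b', 'x=1', 'frag')

# urlnorm() lower-cases scheme/authority and drops the fragment.
print(httplib2.urlnorm('http://Example.COM/a/b?x=1#frag'))
# -> ('http', 'example.com', '/a/b?x=1', 'http://example.com/a/b?x=1')

# safename() (body below) strips the scheme, folds slashes and query
# separators into commas, and appends an md5 digest of the full URI.
print(httplib2.safename('http://example.com/a/b?x=1'))
# -> 'example.com,a,b,x=1,<md5 hexdigest>'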
- - Strips dangerous and common characters to create a filename we - can use to store the cache in. - """ - - try: - if re_url_scheme.match(filename): - if isinstance(filename,str): - filename = filename.decode('utf-8') - filename = filename.encode('idna') - else: - filename = filename.encode('idna') - except UnicodeError: - pass - if isinstance(filename,unicode): - filename=filename.encode('utf-8') - filemd5 = _md5(filename).hexdigest() - filename = re_url_scheme.sub("", filename) - filename = re_slash.sub(",", filename) - - # limit length of filename - if len(filename)>200: - filename=filename[:200] - return ",".join((filename, filemd5)) - -NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') -def _normalize_headers(headers): - return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) - -def _parse_cache_control(headers): - retval = {} - if headers.has_key('cache-control'): - parts = headers['cache-control'].split(',') - parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] - parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] - retval = dict(parts_with_args + parts_wo_args) - return retval - -# Whether to use a strict mode to parse WWW-Authenticate headers -# Might lead to bad results in case of ill-formed header value, -# so disabled by default, falling back to relaxed parsing. -# Set to true to turn on, usefull for testing servers. -USE_WWW_AUTH_STRICT_PARSING = 0 - -# In regex below: -# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP -# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space -# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: -# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"? -WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") -WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(? current_age: - retval = "FRESH" - return retval - -def _decompressContent(response, new_content): - content = new_content - try: - encoding = response.get('content-encoding', None) - if encoding in ['gzip', 'deflate']: - if encoding == 'gzip': - content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() - if encoding == 'deflate': - content = zlib.decompress(content) - response['content-length'] = str(len(content)) - # Record the historical presence of the encoding in a way the won't interfere. 
- response['-content-encoding'] = response['content-encoding'] - del response['content-encoding'] - except IOError: - content = "" - raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) - return content - -def _updateCache(request_headers, response_headers, content, cache, cachekey): - if cachekey: - cc = _parse_cache_control(request_headers) - cc_response = _parse_cache_control(response_headers) - if cc.has_key('no-store') or cc_response.has_key('no-store'): - cache.delete(cachekey) - else: - info = email.Message.Message() - for key, value in response_headers.iteritems(): - if key not in ['status','content-encoding','transfer-encoding']: - info[key] = value - - # Add annotations to the cache to indicate what headers - # are variant for this request. - vary = response_headers.get('vary', None) - if vary: - vary_headers = vary.lower().replace(' ', '').split(',') - for header in vary_headers: - key = '-varied-%s' % header - try: - info[key] = request_headers[header] - except KeyError: - pass - - status = response_headers.status - if status == 304: - status = 200 - - status_header = 'status: %d\r\n' % status - - header_str = info.as_string() - - header_str = re.sub("\r(?!\n)|(? 0: - service = "cl" - # No point in guessing Base or Spreadsheet - #elif request_uri.find("spreadsheets") > 0: - # service = "wise" - - auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) - resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) - lines = content.split('\n') - d = dict([tuple(line.split("=", 1)) for line in lines if line]) - if resp.status == 403: - self.Auth = "" - else: - self.Auth = d['Auth'] - - def request(self, method, request_uri, headers, content): - """Modify the request headers to add the appropriate - Authorization header.""" - headers['authorization'] = 'GoogleLogin Auth=' + self.Auth - - -AUTH_SCHEME_CLASSES = { - "basic": BasicAuthentication, - "wsse": WsseAuthentication, - "digest": DigestAuthentication, - "hmacdigest": HmacDigestAuthentication, - "googlelogin": GoogleLoginAuthentication -} - -AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] - -class FileCache(object): - """Uses a local directory as a store for cached files. - Not really safe to use if multiple threads or processes are going to - be running on the same cache. 
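The rest of FileCache follows below; its interface is just get/set/delete keyed by arbitrary strings. A sketch of direct use, and of how Http builds one from a directory name, assuming the pre-patch httplib2 package; the cache directory is illustrative:

import httplib2

# One file per key under the directory; safename() derives the file name.
fc = httplib2.FileCache('.cache')
fc.set('http://www.example.org/', 'cached bytes')
print(fc.get('http://www.example.org/'))   # -> 'cached bytes'
fc.delete('http://www.example.org/')
print(fc.get('http://www.example.org/'))   # -> None

# Http(cache='.cache') wraps the same directory in a FileCache and
# consults it for GET/HEAD requests.
h = httplib2.Http('.cache')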
- """ - def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior - self.cache = cache - self.safe = safe - if not os.path.exists(cache): - os.makedirs(self.cache) - - def get(self, key): - retval = None - cacheFullPath = os.path.join(self.cache, self.safe(key)) - try: - f = file(cacheFullPath, "rb") - retval = f.read() - f.close() - except IOError: - pass - return retval - - def set(self, key, value): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - f = file(cacheFullPath, "wb") - f.write(value) - f.close() - - def delete(self, key): - cacheFullPath = os.path.join(self.cache, self.safe(key)) - if os.path.exists(cacheFullPath): - os.remove(cacheFullPath) - -class Credentials(object): - def __init__(self): - self.credentials = [] - - def add(self, name, password, domain=""): - self.credentials.append((domain.lower(), name, password)) - - def clear(self): - self.credentials = [] - - def iter(self, domain): - for (cdomain, name, password) in self.credentials: - if cdomain == "" or domain == cdomain: - yield (name, password) - -class KeyCerts(Credentials): - """Identical to Credentials except that - name/password are mapped to key/cert.""" - pass - - -class ProxyInfo(object): - """Collect information required to use a proxy.""" - def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): - """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX - constants. For example: - -p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) - """ - self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass - - def astuple(self): - return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, - self.proxy_user, self.proxy_pass) - - def isgood(self): - return (self.proxy_host != None) and (self.proxy_port != None) - - -class HTTPConnectionWithTimeout(httplib.HTTPConnection): - """ - HTTPConnection subclass that supports timeouts - - All timeouts are in seconds. If None is passed for timeout then - Python's default timeout for sockets will be used. See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - - def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): - httplib.HTTPConnection.__init__(self, host, port, strict) - self.timeout = timeout - self.proxy_info = proxy_info - - def connect(self): - """Connect to the host and port specified in __init__.""" - # Mostly verbatim from httplib.py. - if self.proxy_info and socks is None: - raise ProxiesUnavailableError( - 'Proxy support missing but proxy use was requested!') - msg = "getaddrinfo returns an empty list" - for res in socket.getaddrinfo(self.host, self.port, 0, - socket.SOCK_STREAM): - af, socktype, proto, canonname, sa = res - try: - if self.proxy_info and self.proxy_info.isgood(): - self.sock = socks.socksocket(af, socktype, proto) - self.sock.setproxy(*self.proxy_info.astuple()) - else: - self.sock = socket.socket(af, socktype, proto) - self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - # Different from httplib: support timeouts. - if has_timeout(self.timeout): - self.sock.settimeout(self.timeout) - # End of difference from httplib. 
- if self.debuglevel > 0: - print "connect: (%s, %s)" % (self.host, self.port) - - self.sock.connect(sa) - except socket.error, msg: - if self.debuglevel > 0: - print 'connect fail:', (self.host, self.port) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket.error, msg - -class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): - """ - This class allows communication via SSL. - - All timeouts are in seconds. If None is passed for timeout then - Python's default timeout for sockets will be used. See for example - the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - """ - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=None, proxy_info=None, - ca_certs=None, disable_ssl_certificate_validation=False): - httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, - cert_file=cert_file, strict=strict) - self.timeout = timeout - self.proxy_info = proxy_info - if ca_certs is None: - ca_certs = CA_CERTS - self.ca_certs = ca_certs - self.disable_ssl_certificate_validation = \ - disable_ssl_certificate_validation - - # The following two methods were adapted from https_wrapper.py, released - # with the Google Appengine SDK at - # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py - # under the following license: - # - # Copyright 2007 Google Inc. - # - # Licensed under the Apache License, Version 2.0 (the "License"); - # you may not use this file except in compliance with the License. - # You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # - - def _GetValidHostsForCert(self, cert): - """Returns a list of valid host globs for an SSL certificate. - - Args: - cert: A dictionary representing an SSL certificate. - Returns: - list: A list of valid host globs. - """ - if 'subjectAltName' in cert: - return [x[1] for x in cert['subjectAltName'] - if x[0].lower() == 'dns'] - else: - return [x[0][1] for x in cert['subject'] - if x[0][0].lower() == 'commonname'] - - def _ValidateCertificateHostname(self, cert, hostname): - """Validates that a given hostname is valid for an SSL certificate. - - Args: - cert: A dictionary representing an SSL certificate. - hostname: The hostname to test. - Returns: - bool: Whether or not the hostname is valid for this certificate. - """ - hosts = self._GetValidHostsForCert(cert) - for host in hosts: - host_re = host.replace('.', '\.').replace('*', '[^.]*') - if re.search('^%s$' % (host_re,), hostname, re.I): - return True - return False - - def connect(self): - "Connect to a host on a given (SSL) port." 
- - msg = "getaddrinfo returns an empty list" - for family, socktype, proto, canonname, sockaddr in socket.getaddrinfo( - self.host, self.port, 0, socket.SOCK_STREAM): - try: - if self.proxy_info and self.proxy_info.isgood(): - sock = socks.socksocket(family, socktype, proto) - sock.setproxy(*self.proxy_info.astuple()) - else: - sock = socket.socket(family, socktype, proto) - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - if has_timeout(self.timeout): - sock.settimeout(self.timeout) - sock.connect((self.host, self.port)) - self.sock =_ssl_wrap_socket( - sock, self.key_file, self.cert_file, - self.disable_ssl_certificate_validation, self.ca_certs) - if self.debuglevel > 0: - print "connect: (%s, %s)" % (self.host, self.port) - if not self.disable_ssl_certificate_validation: - cert = self.sock.getpeercert() - hostname = self.host.split(':', 0)[0] - if not self._ValidateCertificateHostname(cert, hostname): - raise CertificateHostnameMismatch( - 'Server presented certificate that does not match ' - 'host %s: %s' % (hostname, cert), hostname, cert) - except ssl_SSLError, e: - if sock: - sock.close() - if self.sock: - self.sock.close() - self.sock = None - # Unfortunately the ssl module doesn't seem to provide any way - # to get at more detailed error information, in particular - # whether the error is due to certificate validation or - # something else (such as SSL protocol mismatch). - if e.errno == ssl.SSL_ERROR_SSL: - raise SSLHandshakeError(e) - else: - raise - except (socket.timeout, socket.gaierror): - raise - except socket.error, msg: - if self.debuglevel > 0: - print 'connect fail:', (self.host, self.port) - if self.sock: - self.sock.close() - self.sock = None - continue - break - if not self.sock: - raise socket.error, msg - -SCHEME_TO_CONNECTION = { - 'http': HTTPConnectionWithTimeout, - 'https': HTTPSConnectionWithTimeout - } - -# Use a different connection object for Google App Engine -try: - from google.appengine.api.urlfetch import fetch - from google.appengine.api.urlfetch import InvalidURLError - from google.appengine.api.urlfetch import DownloadError - from google.appengine.api.urlfetch import ResponseTooLargeError - from google.appengine.api.urlfetch import SSLCertificateError - - - class ResponseDict(dict): - """Is a dictionary that also has a read() method, so - that it can pass itself off as an httlib.HTTPResponse().""" - def read(self): - pass - - - class AppEngineHttpConnection(object): - """Emulates an httplib.HTTPConnection object, but actually uses the Google - App Engine urlfetch library. This allows the timeout to be properly used on - Google App Engine, and avoids using httplib, which on Google App Engine is - just another wrapper around urlfetch. 
- """ - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=None, proxy_info=None, ca_certs=None, - disable_certificate_validation=False): - self.host = host - self.port = port - self.timeout = timeout - if key_file or cert_file or proxy_info or ca_certs: - raise NotSupportedOnThisPlatform() - self.response = None - self.scheme = 'http' - self.validate_certificate = not disable_certificate_validation - self.sock = True - - def request(self, method, url, body, headers): - # Calculate the absolute URI, which fetch requires - netloc = self.host - if self.port: - netloc = '%s:%s' % (self.host, self.port) - absolute_uri = '%s://%s%s' % (self.scheme, netloc, url) - try: - response = fetch(absolute_uri, payload=body, method=method, - headers=headers, allow_truncated=False, follow_redirects=False, - deadline=self.timeout, - validate_certificate=self.validate_certificate) - self.response = ResponseDict(response.headers) - self.response['status'] = str(response.status_code) - self.response.status = response.status_code - setattr(self.response, 'read', lambda : response.content) - - # Make sure the exceptions raised match the exceptions expected. - except InvalidURLError: - raise socket.gaierror('') - except (DownloadError, ResponseTooLargeError, SSLCertificateError): - raise httplib.HTTPException() - - def getresponse(self): - return self.response - - def set_debuglevel(self, level): - pass - - def connect(self): - pass - - def close(self): - pass - - - class AppEngineHttpsConnection(AppEngineHttpConnection): - """Same as AppEngineHttpConnection, but for HTTPS URIs.""" - def __init__(self, host, port=None, key_file=None, cert_file=None, - strict=None, timeout=None, proxy_info=None): - AppEngineHttpConnection.__init__(self, host, port, key_file, cert_file, - strict, timeout, proxy_info) - self.scheme = 'https' - - # Update the connection classes to use the Googel App Engine specific ones. - SCHEME_TO_CONNECTION = { - 'http': AppEngineHttpConnection, - 'https': AppEngineHttpsConnection - } - -except ImportError: - pass - - -class Http(object): - """An HTTP client that handles: -- all methods -- caching -- ETags -- compression, -- HTTPS -- Basic -- Digest -- WSSE - -and more. - """ - def __init__(self, cache=None, timeout=None, proxy_info=None, - ca_certs=None, disable_ssl_certificate_validation=False): - """ - The value of proxy_info is a ProxyInfo instance. - - If 'cache' is a string then it is used as a directory name for - a disk cache. Otherwise it must be an object that supports the - same interface as FileCache. - - All timeouts are in seconds. If None is passed for timeout - then Python's default timeout for sockets will be used. See - for example the docs of socket.setdefaulttimeout(): - http://docs.python.org/library/socket.html#socket.setdefaulttimeout - - ca_certs is the path of a file containing root CA certificates for SSL - server certificate validation. By default, a CA cert file bundled with - httplib2 is used. - - If disable_ssl_certificate_validation is true, SSL cert validation will - not be performed. - """ - self.proxy_info = proxy_info - self.ca_certs = ca_certs - self.disable_ssl_certificate_validation = \ - disable_ssl_certificate_validation - - # Map domain name to an httplib connection - self.connections = {} - # The location of the cache, for now a directory - # where cached responses are held. 
- if cache and isinstance(cache, basestring): - self.cache = FileCache(cache) - else: - self.cache = cache - - # Name/password - self.credentials = Credentials() - - # Key/cert - self.certificates = KeyCerts() - - # authorization objects - self.authorizations = [] - - # If set to False then no redirects are followed, even safe ones. - self.follow_redirects = True - - # Which HTTP methods do we apply optimistic concurrency to, i.e. - # which methods get an "if-match:" etag header added to them. - self.optimistic_concurrency_methods = ["PUT", "PATCH"] - - # If 'follow_redirects' is True, and this is set to True then - # all redirecs are followed, including unsafe ones. - self.follow_all_redirects = False - - self.ignore_etag = False - - self.force_exception_to_status_code = False - - self.timeout = timeout - - def _auth_from_challenge(self, host, request_uri, headers, response, content): - """A generator that creates Authorization objects - that can be applied to requests. - """ - challenges = _parse_www_authenticate(response, 'www-authenticate') - for cred in self.credentials.iter(host): - for scheme in AUTH_SCHEME_ORDER: - if challenges.has_key(scheme): - yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) - - def add_credentials(self, name, password, domain=""): - """Add a name and password that will be used - any time a request requires authentication.""" - self.credentials.add(name, password, domain) - - def add_certificate(self, key, cert, domain): - """Add a key and cert that will be used - any time a request requires authentication.""" - self.certificates.add(key, cert, domain) - - def clear_credentials(self): - """Remove all the names and passwords - that are used for authentication""" - self.credentials.clear() - self.authorizations = [] - - def _conn_request(self, conn, request_uri, method, body, headers): - for i in range(2): - try: - if conn.sock is None: - conn.connect() - conn.request(method, request_uri, body, headers) - except socket.timeout: - raise - except socket.gaierror: - conn.close() - raise ServerNotFoundError("Unable to find the server at %s" % conn.host) - except ssl_SSLError: - conn.close() - raise - except socket.error, e: - err = 0 - if hasattr(e, 'args'): - err = getattr(e, 'args')[0] - else: - err = e.errno - if err == errno.ECONNREFUSED: # Connection refused - raise - except httplib.HTTPException: - # Just because the server closed the connection doesn't apparently mean - # that the server didn't send a response. 
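_conn_request() continues below. The credential and certificate stores that it and _request() draw on for 401 handling are filled through the add_* methods above; a sketch, assuming the pre-patch httplib2 package, with illustrative URL, names, and paths:

import httplib2

h = httplib2.Http()
h.add_credentials('alice', 's3cret', domain='www.example.org')

# On a 401, _auth_from_challenge() matches the WWW-Authenticate challenge
# against AUTH_SCHEME_ORDER (hmacdigest, googlelogin, digest, wsse, basic)
# and retries the request with the winning scheme's Authorization header.
resp, content = h.request('https://www.example.org/protected/')

# Client TLS key/cert pairs are registered the same way.
h.add_certificate('/path/client.key', '/path/client.crt', 'www.example.org')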
- if conn.sock is None: - if i == 0: - conn.close() - conn.connect() - continue - else: - conn.close() - raise - if i == 0: - conn.close() - conn.connect() - continue - pass - try: - response = conn.getresponse() - except (socket.error, httplib.HTTPException): - if i == 0: - conn.close() - conn.connect() - continue - else: - raise - else: - content = "" - if method == "HEAD": - response.close() - else: - content = response.read() - response = Response(response) - if method != "HEAD": - content = _decompressContent(response, content) - break - return (response, content) - - - def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): - """Do the actual request using the connection object - and also follow one level of redirects if necessary""" - - auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] - auth = auths and sorted(auths)[0][1] or None - if auth: - auth.request(method, request_uri, headers, body) - - (response, content) = self._conn_request(conn, request_uri, method, body, headers) - - if auth: - if auth.response(response, body): - auth.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers ) - response._stale_digest = 1 - - if response.status == 401: - for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): - authorization.request(method, request_uri, headers, body) - (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) - if response.status != 401: - self.authorizations.append(authorization) - authorization.response(response, body) - break - - if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): - if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: - # Pick out the location header and basically start from the beginning - # remembering first to strip the ETag header and decrement our 'depth' - if redirections: - if not response.has_key('location') and response.status != 300: - raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) - # Fix-up relative redirects (which violate an RFC 2616 MUST) - if response.has_key('location'): - location = response['location'] - (scheme, authority, path, query, fragment) = parse_uri(location) - if authority == None: - response['location'] = urlparse.urljoin(absolute_uri, location) - if response.status == 301 and method in ["GET", "HEAD"]: - response['-x-permanent-redirect-url'] = response['location'] - if not response.has_key('content-location'): - response['content-location'] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - if headers.has_key('if-none-match'): - del headers['if-none-match'] - if headers.has_key('if-modified-since'): - del headers['if-modified-since'] - if response.has_key('location'): - location = response['location'] - old_response = copy.deepcopy(response) - if not old_response.has_key('content-location'): - old_response['content-location'] = absolute_uri - redirect_method = method - if response.status in [302, 303]: - redirect_method = "GET" - body = None - (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) - response.previous = old_response - else: - raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) - elif 
response.status in [200, 203] and method in ["GET", "HEAD"]: - # Don't cache 206's since we aren't going to handle byte range requests - if not response.has_key('content-location'): - response['content-location'] = absolute_uri - _updateCache(headers, response, content, self.cache, cachekey) - - return (response, content) - - def _normalize_headers(self, headers): - return _normalize_headers(headers) - -# Need to catch and rebrand some exceptions -# Then need to optionally turn all exceptions into status codes -# including all socket.* and httplib.* exceptions. - - - def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): - """ Performs a single HTTP request. -The 'uri' is the URI of the HTTP resource and can begin -with either 'http' or 'https'. The value of 'uri' must be an absolute URI. - -The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. -There is no restriction on the methods allowed. - -The 'body' is the entity body to be sent with the request. It is a string -object. - -Any extra headers that are to be sent with the request should be provided in the -'headers' dictionary. - -The maximum number of redirect to follow before raising an -exception is 'redirections. The default is 5. - -The return value is a tuple of (response, content), the first -being and instance of the 'Response' class, the second being -a string that contains the response entity body. - """ - try: - if headers is None: - headers = {} - else: - headers = self._normalize_headers(headers) - - if not headers.has_key('user-agent'): - headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ - - uri = iri2uri(uri) - - (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) - domain_port = authority.split(":")[0:2] - if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': - scheme = 'https' - authority = domain_port[0] - - conn_key = scheme+":"+authority - if conn_key in self.connections: - conn = self.connections[conn_key] - else: - if not connection_type: - connection_type = SCHEME_TO_CONNECTION[scheme] - certs = list(self.certificates.iter(authority)) - if issubclass(connection_type, HTTPSConnectionWithTimeout): - if certs: - conn = self.connections[conn_key] = connection_type( - authority, key_file=certs[0][0], - cert_file=certs[0][1], timeout=self.timeout, - proxy_info=self.proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation= - self.disable_ssl_certificate_validation) - else: - conn = self.connections[conn_key] = connection_type( - authority, timeout=self.timeout, - proxy_info=self.proxy_info, - ca_certs=self.ca_certs, - disable_ssl_certificate_validation= - self.disable_ssl_certificate_validation) - else: - conn = self.connections[conn_key] = connection_type( - authority, timeout=self.timeout, - proxy_info=self.proxy_info) - conn.set_debuglevel(debuglevel) - - if 'range' not in headers and 'accept-encoding' not in headers: - headers['accept-encoding'] = 'gzip, deflate' - - info = email.Message.Message() - cached_value = None - if self.cache: - cachekey = defrag_uri - cached_value = self.cache.get(cachekey) - if cached_value: - # info = email.message_from_string(cached_value) - # - # Need to replace the line above with the kludge below - # to fix the non-existent bug not fixed in this - # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html - try: - info, content = cached_value.split('\r\n\r\n', 1) - feedparser = 
email.FeedParser.FeedParser() - feedparser.feed(info) - info = feedparser.close() - feedparser._parse = None - except IndexError: - self.cache.delete(cachekey) - cachekey = None - cached_value = None - else: - cachekey = None - - if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: - # http://www.w3.org/1999/04/Editing/ - headers['if-match'] = info['etag'] - - if method not in ["GET", "HEAD"] and self.cache and cachekey: - # RFC 2616 Section 13.10 - self.cache.delete(cachekey) - - # Check the vary header in the cache to see if this request - # matches what varies in the cache. - if method in ['GET', 'HEAD'] and 'vary' in info: - vary = info['vary'] - vary_headers = vary.lower().replace(' ', '').split(',') - for header in vary_headers: - key = '-varied-%s' % header - value = info[key] - if headers.get(header, None) != value: - cached_value = None - break - - if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: - if info.has_key('-x-permanent-redirect-url'): - # Should cached permanent redirects be counted in our redirection count? For now, yes. - if redirections <= 0: - raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") - (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) - response.previous = Response(info) - response.previous.fromcache = True - else: - # Determine our course of action: - # Is the cached entry fresh or stale? - # Has the client requested a non-cached response? - # - # There seems to be three possible answers: - # 1. [FRESH] Return the cache entry w/o doing a GET - # 2. [STALE] Do the GET (but add in cache validators if available) - # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request - entry_disposition = _entry_disposition(info, headers) - - if entry_disposition == "FRESH": - if not cached_value: - info['status'] = '504' - content = "" - response = Response(info) - if cached_value: - response.fromcache = True - return (response, content) - - if entry_disposition == "STALE": - if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: - headers['if-none-match'] = info['etag'] - if info.has_key('last-modified') and not 'last-modified' in headers: - headers['if-modified-since'] = info['last-modified'] - elif entry_disposition == "TRANSPARENT": - pass - - (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) - - if response.status == 304 and method == "GET": - # Rewrite the cache entry with the new end-to-end headers - # Take all headers that are in response - # and overwrite their values in info. - # unless they are hop-by-hop, or are listed in the connection header. 
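The 304-merge branch continues below. Seen from the caller, the caching and conditional-request machinery in request() behaves roughly as in this sketch, assuming the pre-patch httplib2 package; the URL is illustrative:

import httplib2

h = httplib2.Http('.cache')

resp1, content1 = h.request('http://www.example.org/')

# If the cached entry is stale, the second GET carries If-None-Match /
# If-Modified-Since; a 304 answer is merged with the cached entry and
# surfaced as status 200 with fromcache set.
resp2, content2 = h.request('http://www.example.org/')
print('%s fromcache=%s' % (resp2.status, resp2.fromcache))

# Methods in optimistic_concurrency_methods (PUT, PATCH) automatically
# send If-Match built from the cached ETag.
resp3, content3 = h.request('http://www.example.org/', method='PUT',
                            body='new representation')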
- - for key in _get_end2end_headers(response): - info[key] = response[key] - merged_response = Response(info) - if hasattr(response, "_stale_digest"): - merged_response._stale_digest = response._stale_digest - _updateCache(headers, merged_response, content, self.cache, cachekey) - response = merged_response - response.status = 200 - response.fromcache = True - - elif response.status == 200: - content = new_content - else: - self.cache.delete(cachekey) - content = new_content - else: - cc = _parse_cache_control(headers) - if cc.has_key('only-if-cached'): - info['status'] = '504' - response = Response(info) - content = "" - else: - (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) - except Exception, e: - if self.force_exception_to_status_code: - if isinstance(e, HttpLib2ErrorWithResponse): - response = e.response - content = e.content - response.status = 500 - response.reason = str(e) - elif isinstance(e, socket.timeout): - content = "Request Timeout" - response = Response( { - "content-type": "text/plain", - "status": "408", - "content-length": len(content) - }) - response.reason = "Request Timeout" - else: - content = str(e) - response = Response( { - "content-type": "text/plain", - "status": "400", - "content-length": len(content) - }) - response.reason = "Bad Request" - else: - raise - - - return (response, content) - - - -class Response(dict): - """An object more like email.Message than httplib.HTTPResponse.""" - - """Is this response from our local cache""" - fromcache = False - - """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ - version = 11 - - "Status code returned by server. " - status = 200 - - """Reason phrase returned by server.""" - reason = "Ok" - - previous = None - - def __init__(self, info): - # info is either an email.Message or - # an httplib.HTTPResponse object. - if isinstance(info, httplib.HTTPResponse): - for key, value in info.getheaders(): - self[key.lower()] = value - self.status = info.status - self['status'] = str(self.status) - self.reason = info.reason - self.version = info.version - elif isinstance(info, email.Message.Message): - for key, value in info.items(): - self[key] = value - self.status = int(self['status']) - else: - for key, value in info.iteritems(): - self[key] = value - self.status = int(self.get('status', self.status)) - - - def __getattr__(self, name): - if name == 'dict': - return self - else: - raise AttributeError, name diff --git a/httplib2/cacerts.txt b/httplib2/cacerts.txt deleted file mode 100644 index da36ed1..0000000 --- a/httplib2/cacerts.txt +++ /dev/null @@ -1,714 +0,0 @@ -# Certifcate Authority certificates for validating SSL connections. -# -# This file contains PEM format certificates generated from -# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt -# -# ***** BEGIN LICENSE BLOCK ***** -# Version: MPL 1.1/GPL 2.0/LGPL 2.1 -# -# The contents of this file are subject to the Mozilla Public License Version -# 1.1 (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# http://www.mozilla.org/MPL/ -# -# Software distributed under the License is distributed on an "AS IS" basis, -# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License -# for the specific language governing rights and limitations under the -# License. -# -# The Original Code is the Netscape security libraries. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1994-2000 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# -# Alternatively, the contents of this file may be used under the terms of -# either the GNU General Public License Version 2 or later (the "GPL"), or -# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), -# in which case the provisions of the GPL or the LGPL are applicable instead -# of those above. If you wish to allow use of your version of this file only -# under the terms of either the GPL or the LGPL, and not to allow others to -# use your version of this file under the terms of the MPL, indicate your -# decision by deleting the provisions above and replace them with the notice -# and other provisions required by the GPL or the LGPL. If you do not delete -# the provisions above, a recipient may use your version of this file under -# the terms of any one of the MPL, the GPL or the LGPL. -# -# ***** END LICENSE BLOCK ***** - -Verisign/RSA Secure Server CA -============================= - ------BEGIN CERTIFICATE----- -MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD -VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0 -MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV -BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy -dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ -ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII -0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI -uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI -hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3 -YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc -1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA== ------END CERTIFICATE----- - -Thawte Personal Basic CA -======================== - ------BEGIN CERTIFICATE----- -MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD -VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT -ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj -IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X -DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw -EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE -ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy -dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD -QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN -BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53 -dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK -wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7 -G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF -AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7 -c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P -9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ== ------END CERTIFICATE----- - -Thawte Personal Premium CA -========================== - ------BEGIN CERTIFICATE----- -MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD -VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT 
-ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p -dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv -bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa -QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY -BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u -IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl -bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu -Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs -Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI -Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD -ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG -SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH -b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh -KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ ------END CERTIFICATE----- - -Thawte Personal Freemail CA -=========================== - ------BEGIN CERTIFICATE----- -MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD -VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT -ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt -YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu -Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT -AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa -MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG -cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh -d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY -DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E -rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq -uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN -BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP -MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa -/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei -gQ== ------END CERTIFICATE----- - -Thawte Server CA -================ - ------BEGIN CERTIFICATE----- -MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm -MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx -MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT -DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 -dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl -cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 -DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD -gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 -yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX -L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj -EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG -7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e -QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ -qdq5snUb9kLy78fyGPmJvKP/iiMucEc= ------END CERTIFICATE----- - -Thawte Premium Server CA -======================== - ------BEGIN CERTIFICATE----- -MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx 
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD -VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv -biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy -dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t -MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB -MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG -A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp -b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl -cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv -bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE -VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ -ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR -uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG -9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI -hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM -pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== ------END CERTIFICATE----- - -Equifax Secure CA -================= - ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- - -Verisign Class 1 Public Primary Certification Authority -======================================================= - ------BEGIN CERTIFICATE----- -MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh -c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05 -NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD -VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp -bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB -jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N -H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR -4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN -BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo -EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5 -FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx -lA== ------END CERTIFICATE----- - -Verisign Class 2 Public Primary Certification Authority -======================================================= - ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz 
-cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh -YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7 -FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G -CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg -J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc -r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority -======================================================= - ------BEGIN CERTIFICATE----- -MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz -cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 -MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV -BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt -YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN -ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE -BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is -I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G -CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do -lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc -AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k ------END CERTIFICATE----- - -Verisign Class 1 Public Primary Certification Authority - G2 -============================================================ - ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMSBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQCq0Lq+Fi24g9TK0g+8djHKlNgdk4xWArzZbxpvUjZudVYK -VdPfQ4chEWWKfo+9Id5rMj8bhDSVBZ1BNeuS65bdqlk/AVNtmU/t5eIqWpDBucSm -Fc/IReumXY6cPvBkJHalzasab7bYe1FhbqZ/h8jit+U03EGI6glAvnOSPWvndQID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAKlPww3HZ74sy9mozS11534Vnjty637rXC0J -h9ZrbWB85a7FkCMMXErQr7Fd88e2CtvgFZMN3QO8x3aKtd1Pw5sTdbgBwObJW2ul -uIncrKTdcu1OofdPvAbT6shkdHvClUGcZXNY8ZCaPGqxmMnEh7zPRW1F4m4iP/68 -DzFc6PLZ ------END CERTIFICATE----- - -Verisign Class 2 Public Primary Certification Authority - G2 -============================================================ - ------BEGIN CERTIFICATE----- -MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns -YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y -aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe -Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj 
-IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx -KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM -HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw -DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC -AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji -nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX -rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn -jBJ7xUS0rg== ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority - G2 -============================================================ - ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 -pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 -13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk -U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i -F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY -oJ2daZH9 ------END CERTIFICATE----- - -Verisign Class 4 Public Primary Certification Authority - G2 -============================================================ - ------BEGIN CERTIFICATE----- -MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh -c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy -MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp -emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X -DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw -FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg -UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo -YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 -MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB -AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM -HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK -qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID -AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj -cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y -cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP -T8qAkbYp ------END CERTIFICATE----- - -Verisign Class 1 Public Primary Certification Authority - G3 -============================================================ - ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu 
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDEgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAN2E1Lm0+afY8wR4 -nN493GwTFtl63SRRZsDHJlkNrAYIwpTRMx/wgzUfbhvI3qpuFU5UJ+/EbRrsC+MO -8ESlV8dAWB6jRx9x7GD2bZTIGDnt/kIYVt/kTEkQeE4BdjVjEjbdZrwBBDajVWjV -ojYJrKshJlQGrT/KFOCsyq0GHZXi+J3x4GD/wn91K0zM2v6HmSHquv4+VNfSWXjb -PG7PoBMAGrgnoeS+Z5bKoMWznN3JdZ7rMJpfo83ZrngZPyPpXNspva1VyBtUjGP2 -6KbqxzcSXKMpHgLZ2x87tNcPVkeBFQRKr4Mn0cVYiMHd9qqnoxjaaKptEVHhv2Vr -n5Z20T0CAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAq2aN17O6x5q25lXQBfGfMY1a -qtmqRiYPce2lrVNWYgFHKkTp/j90CxObufRNG7LRX7K20ohcs5/Ny9Sn2WCVhDr4 -wTcdYcrnsMXlkdpUpqwxga6X3s0IrLjAl4B/bnKk52kTlWUfxJM8/XmPBNQ+T+r3 -ns7NZ3xPZQL/kYVUc8f/NveGLezQXk//EZ9yBta4GvFMDSZl4kSAHsef493oCtrs -pSCAaWihT37ha88HQfqDjrw43bAuEbFrskLMmrz5SCJ5ShkPshw+IHTZasO+8ih4 -E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g== ------END CERTIFICATE----- - -Verisign Class 2 Public Primary Certification Authority - G3 -============================================================ - ------BEGIN CERTIFICATE----- -MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ -BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy -aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s -IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp -Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV -BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp -Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu -Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g -Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt -IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU -J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO -JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY -wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o -koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN -qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E -Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe -xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u -7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU -sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI -sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP -cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q ------END CERTIFICATE----- - -Verisign Class 3 Public Primary Certification Authority - G3 -============================================================ - ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT 
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -Verisign Class 4 Public Primary Certification Authority - G3 -============================================================ - ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 -GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ -+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd -U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm -NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY -ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ -ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 -CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq -g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm -fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c -2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ -bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== ------END CERTIFICATE----- - -Equifax Secure Global eBusiness CA -================================== - ------BEGIN CERTIFICATE----- -MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT -ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw -MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj -dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l -c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC -UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc -58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ -o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr -aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA -A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA 
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv -8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV ------END CERTIFICATE----- - -Equifax Secure eBusiness CA 1 -============================= - ------BEGIN CERTIFICATE----- -MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc -MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT -ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw -MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j -LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ -KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo -RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu -WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw -Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK -eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM -zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ -WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN -/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== ------END CERTIFICATE----- - -Equifax Secure eBusiness CA 2 -============================= - ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj -dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 -NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD -VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G -vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ -BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX -MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl -IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw -NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq -y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy -0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 -E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN ------END CERTIFICATE----- - -Thawte Time Stamping CA -======================= - ------BEGIN CERTIFICATE----- -MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx -FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN -BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd -BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN -MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g -Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG -A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l -c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT -6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa -Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL -8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC -9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ -pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ -CayJSdM= ------END CERTIFICATE----- - -thawte Primary Root CA -====================== - ------BEGIN CERTIFICATE----- 
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -VeriSign Class 3 Public Primary Certification Authority - G5 -============================================================ - ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -Entrust.net Secure Server Certification Authority -================================================= - ------BEGIN CERTIFICATE----- 
-MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC -VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u -ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc -KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u -ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 -MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE -ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j -b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF -bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg -U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA -A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ -I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 -wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC -AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb -oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 -BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p -dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk -MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp -b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu -dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 -MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi -E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa -MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI -hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN -95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd -2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= ------END CERTIFICATE----- - -Go Daddy Certification Authority Root Certificate Bundle -======================================================== - ------BEGIN CERTIFICATE----- -MIIE3jCCA8agAwIBAgICAwEwDQYJKoZIhvcNAQEFBQAwYzELMAkGA1UEBhMCVVMx -ITAfBgNVBAoTGFRoZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28g -RGFkZHkgQ2xhc3MgMiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjExMTYw -MTU0MzdaFw0yNjExMTYwMTU0MzdaMIHKMQswCQYDVQQGEwJVUzEQMA4GA1UECBMH -QXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTEaMBgGA1UEChMRR29EYWRkeS5j -b20sIEluYy4xMzAxBgNVBAsTKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5j -b20vcmVwb3NpdG9yeTEwMC4GA1UEAxMnR28gRGFkZHkgU2VjdXJlIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MREwDwYDVQQFEwgwNzk2OTI4NzCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAMQt1RWMnCZM7DI161+4WQFapmGBWTtwY6vj3D3H -KrjJM9N55DrtPDAjhI6zMBS2sofDPZVUBJ7fmd0LJR4h3mUpfjWoqVTr9vcyOdQm -VZWt7/v+WIbXnvQAjYwqDL1CBM6nPwT27oDyqu9SoWlm2r4arV3aLGbqGmu75RpR -SgAvSMeYddi5Kcju+GZtCpyz8/x4fKL4o/K1w/O5epHBp+YlLpyo7RJlbmr2EkRT -cDCVw5wrWCs9CHRK8r5RsL+H0EwnWGu1NcWdrxcx+AuP7q2BNgWJCJjPOq8lh8BJ -6qf9Z/dFjpfMFDniNoW1fho3/Rb2cRGadDAW/hOUoz+EDU8CAwEAAaOCATIwggEu -MB0GA1UdDgQWBBT9rGEyk2xF1uLuhV+auud2mWjM5zAfBgNVHSMEGDAWgBTSxLDS -kdRMEXGzYcs9of7dqGrU4zASBgNVHRMBAf8ECDAGAQH/AgEAMDMGCCsGAQUFBwEB -BCcwJTAjBggrBgEFBQcwAYYXaHR0cDovL29jc3AuZ29kYWRkeS5jb20wRgYDVR0f -BD8wPTA7oDmgN4Y1aHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNvbS9yZXBv -c2l0b3J5L2dkcm9vdC5jcmwwSwYDVR0gBEQwQjBABgRVHSAAMDgwNgYIKwYBBQUH -AgEWKmh0dHA6Ly9jZXJ0aWZpY2F0ZXMuZ29kYWRkeS5jb20vcmVwb3NpdG9yeTAO -BgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBANKGwOy9+aG2Z+5mC6IG -OgRQjhVyrEp0lVPLN8tESe8HkGsz2ZbwlFalEzAFPIUyIXvJxwqoJKSQ3kbTJSMU -A2fCENZvD117esyfxVgqwcSeIaha86ykRvOe5GPLL5CkKSkB2XIsKd83ASe8T+5o -0yGPwLPk9Qnt0hCqU7S+8MxZC9Y7lhyVJEnfzuz9p0iRFEUOOjZv2kWzRaJBydTX -RE4+uXR21aITVSzGh6O1mawGhId/dQb8vxRMDsxuxN89txJx9OjxUUAiKEngHUuH 
-qDTMBqLdElrRhjZkAzVvb3du6/KFUJheqwNTrZEjYx8WnM25sgVjOuH0aBsXBTWV -U+4= ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIE+zCCBGSgAwIBAgICAQ0wDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1Zh -bGlDZXJ0IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIElu -Yy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24g -QXV0aG9yaXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAe -BgkqhkiG9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTA0MDYyOTE3MDYyMFoX -DTI0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRoZSBHbyBE -YWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3MgMiBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggENADCCAQgC -ggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCAPVYYYwhv -2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6wwdhFJ2+q -N1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXiEqITLdiO -r18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMYavx4A6lN -f4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+YihfukEH -U1jPEX44dMX4/7VpkI+EdOqXG68CAQOjggHhMIIB3TAdBgNVHQ4EFgQU0sSw0pHU -TBFxs2HLPaH+3ahq1OMwgdIGA1UdIwSByjCBx6GBwaSBvjCBuzEkMCIGA1UEBxMb -VmFsaUNlcnQgVmFsaWRhdGlvbiBOZXR3b3JrMRcwFQYDVQQKEw5WYWxpQ2VydCwg -SW5jLjE1MDMGA1UECxMsVmFsaUNlcnQgQ2xhc3MgMiBQb2xpY3kgVmFsaWRhdGlv -biBBdXRob3JpdHkxITAfBgNVBAMTGGh0dHA6Ly93d3cudmFsaWNlcnQuY29tLzEg -MB4GCSqGSIb3DQEJARYRaW5mb0B2YWxpY2VydC5jb22CAQEwDwYDVR0TAQH/BAUw -AwEB/zAzBggrBgEFBQcBAQQnMCUwIwYIKwYBBQUHMAGGF2h0dHA6Ly9vY3NwLmdv -ZGFkZHkuY29tMEQGA1UdHwQ9MDswOaA3oDWGM2h0dHA6Ly9jZXJ0aWZpY2F0ZXMu -Z29kYWRkeS5jb20vcmVwb3NpdG9yeS9yb290LmNybDBLBgNVHSAERDBCMEAGBFUd -IAAwODA2BggrBgEFBQcCARYqaHR0cDovL2NlcnRpZmljYXRlcy5nb2RhZGR5LmNv -bS9yZXBvc2l0b3J5MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOBgQC1 -QPmnHfbq/qQaQlpE9xXUhUaJwL6e4+PrxeNYiY+Sn1eocSxI0YGyeR+sBjUZsE4O -WBsUs5iB0QQeyAfJg594RAoYC5jcdnplDQ1tgMQLARzLrUc+cb53S8wGd9D0Vmsf -SxOaFIqII6hR8INMqzW/Rn453HWkrugp++85j09VZw== ------END CERTIFICATE----- ------BEGIN CERTIFICATE----- -MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 -IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz -BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y -aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG -9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy -NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y -azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs -YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw -Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl -cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY -dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 -WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS -v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v -UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu -IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC -W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd ------END CERTIFICATE----- - diff --git a/httplib2/iri2uri.py b/httplib2/iri2uri.py deleted file mode 100644 index 70667ed..0000000 --- a/httplib2/iri2uri.py +++ /dev/null @@ -1,110 +0,0 @@ -""" -iri2uri - -Converts an IRI to a URI. 
- -""" -__author__ = "Joe Gregorio (joe@bitworking.org)" -__copyright__ = "Copyright 2006, Joe Gregorio" -__contributors__ = [] -__version__ = "1.0.0" -__license__ = "MIT" -__history__ = """ -""" - -import urlparse - - -# Convert an IRI to a URI following the rules in RFC 3987 -# -# The characters we need to enocde and escape are defined in the spec: -# -# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD -# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF -# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD -# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD -# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD -# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD -# / %xD0000-DFFFD / %xE1000-EFFFD - -escape_range = [ - (0xA0, 0xD7FF ), - (0xE000, 0xF8FF ), - (0xF900, 0xFDCF ), - (0xFDF0, 0xFFEF), - (0x10000, 0x1FFFD ), - (0x20000, 0x2FFFD ), - (0x30000, 0x3FFFD), - (0x40000, 0x4FFFD ), - (0x50000, 0x5FFFD ), - (0x60000, 0x6FFFD), - (0x70000, 0x7FFFD ), - (0x80000, 0x8FFFD ), - (0x90000, 0x9FFFD), - (0xA0000, 0xAFFFD ), - (0xB0000, 0xBFFFD ), - (0xC0000, 0xCFFFD), - (0xD0000, 0xDFFFD ), - (0xE1000, 0xEFFFD), - (0xF0000, 0xFFFFD ), - (0x100000, 0x10FFFD) -] - -def encode(c): - retval = c - i = ord(c) - for low, high in escape_range: - if i < low: - break - if i >= low and i <= high: - retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) - break - return retval - - -def iri2uri(uri): - """Convert an IRI to a URI. Note that IRIs must be - passed in a unicode strings. That is, do not utf-8 encode - the IRI before passing it into the function.""" - if isinstance(uri ,unicode): - (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) - authority = authority.encode('idna') - # For each character in 'ucschar' or 'iprivate' - # 1. encode as utf-8 - # 2. 
then %-encode each octet of that utf-8 - uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) - uri = "".join([encode(c) for c in uri]) - return uri - -if __name__ == "__main__": - import unittest - - class Test(unittest.TestCase): - - def test_uris(self): - """Test that URIs are invariant under the transformation.""" - invariant = [ - u"ftp://ftp.is.co.za/rfc/rfc1808.txt", - u"http://www.ietf.org/rfc/rfc2396.txt", - u"ldap://[2001:db8::7]/c=GB?objectClass?one", - u"mailto:John.Doe@example.com", - u"news:comp.infosystems.www.servers.unix", - u"tel:+1-816-555-1212", - u"telnet://192.0.2.16:80/", - u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] - for uri in invariant: - self.assertEqual(uri, iri2uri(uri)) - - def test_iri(self): - """ Test that the right type of escaping is done for each part of the URI.""" - self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) - self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) - self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) - self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) - self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) - self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) - self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) - - unittest.main() - - diff --git a/httplib2/socks.py b/httplib2/socks.py deleted file mode 100644 index 84cb93b..0000000 --- a/httplib2/socks.py +++ /dev/null @@ -1,438 +0,0 @@ -"""SocksiPy - Python SOCKS module. -Version 1.00 - -Copyright 2006 Dan-Haim. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -3. Neither the name of Dan Haim nor the names of his contributors may be used - to endorse or promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA -OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. - - -This module provides a standard socket-like interface for Python -for tunneling connections through SOCKS proxies. 
- -""" - -""" - -Minor modifications made by Christopher Gilbert (http://motomastyle.com/) -for use in PyLoris (http://pyloris.sourceforge.net/) - -Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) -mainly to merge bug fixes found in Sourceforge - -""" - -import base64 -import socket -import struct -import sys - -if getattr(socket, 'socket', None) is None: - raise ImportError('socket.socket missing, proxy support unusable') - -PROXY_TYPE_SOCKS4 = 1 -PROXY_TYPE_SOCKS5 = 2 -PROXY_TYPE_HTTP = 3 -PROXY_TYPE_HTTP_NO_TUNNEL = 4 - -_defaultproxy = None -_orgsocket = socket.socket - -class ProxyError(Exception): pass -class GeneralProxyError(ProxyError): pass -class Socks5AuthError(ProxyError): pass -class Socks5Error(ProxyError): pass -class Socks4Error(ProxyError): pass -class HTTPError(ProxyError): pass - -_generalerrors = ("success", - "invalid data", - "not connected", - "not available", - "bad proxy type", - "bad input") - -_socks5errors = ("succeeded", - "general SOCKS server failure", - "connection not allowed by ruleset", - "Network unreachable", - "Host unreachable", - "Connection refused", - "TTL expired", - "Command not supported", - "Address type not supported", - "Unknown error") - -_socks5autherrors = ("succeeded", - "authentication is required", - "all offered authentication methods were rejected", - "unknown username or invalid password", - "unknown error") - -_socks4errors = ("request granted", - "request rejected or failed", - "request rejected because SOCKS server cannot connect to identd on the client", - "request rejected because the client program and identd report different user-ids", - "unknown error") - -def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): - """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - Sets a default proxy which all further socksocket objects will use, - unless explicitly changed. - """ - global _defaultproxy - _defaultproxy = (proxytype, addr, port, rdns, username, password) - -def wrapmodule(module): - """wrapmodule(module) - Attempts to replace a module's socket library with a SOCKS socket. Must set - a default proxy using setdefaultproxy(...) first. - This will only work on modules that import socket directly into the namespace; - most of the Python Standard Library falls into this category. - """ - if _defaultproxy != None: - module.socket.socket = socksocket - else: - raise GeneralProxyError((4, "no proxy specified")) - -class socksocket(socket.socket): - """socksocket([family[, type[, proto]]]) -> socket object - Open a SOCKS enabled socket. The parameters are the same as - those of the standard socket init. In order for SOCKS to work, - you must specify family=AF_INET, type=SOCK_STREAM and proto=0. - """ - - def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): - _orgsocket.__init__(self, family, type, proto, _sock) - if _defaultproxy != None: - self.__proxy = _defaultproxy - else: - self.__proxy = (None, None, None, None, None, None) - self.__proxysockname = None - self.__proxypeername = None - self.__httptunnel = True - - def __recvall(self, count): - """__recvall(count) -> data - Receive EXACTLY the number of bytes requested from the socket. - Blocks until the required number of bytes have been received. 
- """ - data = self.recv(count) - while len(data) < count: - d = self.recv(count-len(data)) - if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) - data = data + d - return data - - def sendall(self, content, *args): - """ override socket.socket.sendall method to rewrite the header - for non-tunneling proxies if needed - """ - if not self.__httptunnel: - content = self.__rewriteproxy(content) - return super(socksocket, self).sendall(content, *args) - - def __rewriteproxy(self, header): - """ rewrite HTTP request headers to support non-tunneling proxies - (i.e. those which do not support the CONNECT method). - This only works for HTTP (not HTTPS) since HTTPS requires tunneling. - """ - host, endpt = None, None - hdrs = header.split("\r\n") - for hdr in hdrs: - if hdr.lower().startswith("host:"): - host = hdr - elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): - endpt = hdr - if host and endpt: - hdrs.remove(host) - hdrs.remove(endpt) - host = host.split(" ")[1] - endpt = endpt.split(" ") - if (self.__proxy[4] != None and self.__proxy[5] != None): - hdrs.insert(0, self.__getauthheader()) - hdrs.insert(0, "Host: %s" % host) - hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) - return "\r\n".join(hdrs) - - def __getauthheader(self): - auth = self.__proxy[4] + ":" + self.__proxy[5] - return "Proxy-Authorization: Basic " + base64.b64encode(auth) - - def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): - """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) - Sets the proxy to be used. - proxytype - The type of the proxy to be used. Three types - are supported: PROXY_TYPE_SOCKS4 (including socks4a), - PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP - addr - The address of the server (IP or DNS). - port - The port of the server. Defaults to 1080 for SOCKS - servers and 8080 for HTTP proxy servers. - rdns - Should DNS queries be preformed on the remote side - (rather than the local side). The default is True. - Note: This has no effect with SOCKS4 servers. - username - Username to authenticate with to the server. - The default is no authentication. - password - Password to authenticate with to the server. - Only relevant when username is also provided. - """ - self.__proxy = (proxytype, addr, port, rdns, username, password) - - def __negotiatesocks5(self, destaddr, destport): - """__negotiatesocks5(self,destaddr,destport) - Negotiates a connection through a SOCKS5 server. - """ - # First we'll send the authentication packages we support. - if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): - # The username/password details were supplied to the - # setproxy method so we support the USERNAME/PASSWORD - # authentication (in addition to the standard none). - self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) - else: - # No username/password were entered, therefore we - # only support connections with no authentication. - self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) - # We'll receive the server's response to determine which - # method was selected - chosenauth = self.__recvall(2) - if chosenauth[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - # Check the chosen authentication method - if chosenauth[1:2] == chr(0x00).encode(): - # No authentication is required - pass - elif chosenauth[1:2] == chr(0x02).encode(): - # Okay, we need to perform a basic username/password - # authentication. 
- self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) - authstat = self.__recvall(2) - if authstat[0:1] != chr(0x01).encode(): - # Bad response - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if authstat[1:2] != chr(0x00).encode(): - # Authentication failed - self.close() - raise Socks5AuthError((3, _socks5autherrors[3])) - # Authentication succeeded - else: - # Reaching here is always bad - self.close() - if chosenauth[1] == chr(0xFF).encode(): - raise Socks5AuthError((2, _socks5autherrors[2])) - else: - raise GeneralProxyError((1, _generalerrors[1])) - # Now we can request the actual connection - req = struct.pack('BBB', 0x05, 0x01, 0x00) - # If the given destination address is an IP address, we'll - # use the IPv4 address request even if remote resolving was specified. - try: - ipaddr = socket.inet_aton(destaddr) - req = req + chr(0x01).encode() + ipaddr - except socket.error: - # Well it's not an IP number, so it's probably a DNS name. - if self.__proxy[3]: - # Resolve remotely - ipaddr = None - req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr - else: - # Resolve locally - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - req = req + chr(0x01).encode() + ipaddr - req = req + struct.pack(">H", destport) - self.sendall(req) - # Get the response - resp = self.__recvall(4) - if resp[0:1] != chr(0x05).encode(): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - elif resp[1:2] != chr(0x00).encode(): - # Connection failed - self.close() - if ord(resp[1:2])<=8: - raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) - else: - raise Socks5Error((9, _socks5errors[9])) - # Get the bound address/port - elif resp[3:4] == chr(0x01).encode(): - boundaddr = self.__recvall(4) - elif resp[3:4] == chr(0x03).encode(): - resp = resp + self.recv(1) - boundaddr = self.__recvall(ord(resp[4:5])) - else: - self.close() - raise GeneralProxyError((1,_generalerrors[1])) - boundport = struct.unpack(">H", self.__recvall(2))[0] - self.__proxysockname = (boundaddr, boundport) - if ipaddr != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def getproxysockname(self): - """getsockname() -> address info - Returns the bound IP address and port number at the proxy. - """ - return self.__proxysockname - - def getproxypeername(self): - """getproxypeername() -> address info - Returns the IP and port number of the proxy. - """ - return _orgsocket.getpeername(self) - - def getpeername(self): - """getpeername() -> address info - Returns the IP address and port number of the destination - machine (note: getproxypeername returns the proxy) - """ - return self.__proxypeername - - def __negotiatesocks4(self,destaddr,destport): - """__negotiatesocks4(self,destaddr,destport) - Negotiates a connection through a SOCKS4 server. - """ - # Check if the destination address provided is an IP address - rmtrslv = False - try: - ipaddr = socket.inet_aton(destaddr) - except socket.error: - # It's a DNS name. Check where it should be resolved. 
- if self.__proxy[3]: - ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) - rmtrslv = True - else: - ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) - # Construct the request packet - req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr - # The username parameter is considered userid for SOCKS4 - if self.__proxy[4] != None: - req = req + self.__proxy[4] - req = req + chr(0x00).encode() - # DNS name if remote resolving is required - # NOTE: This is actually an extension to the SOCKS4 protocol - # called SOCKS4A and may not be supported in all cases. - if rmtrslv: - req = req + destaddr + chr(0x00).encode() - self.sendall(req) - # Get the response from the server - resp = self.__recvall(8) - if resp[0:1] != chr(0x00).encode(): - # Bad data - self.close() - raise GeneralProxyError((1,_generalerrors[1])) - if resp[1:2] != chr(0x5A).encode(): - # Server returned an error - self.close() - if ord(resp[1:2]) in (91, 92, 93): - self.close() - raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) - else: - raise Socks4Error((94, _socks4errors[4])) - # Get the bound address/port - self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) - if rmtrslv != None: - self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) - else: - self.__proxypeername = (destaddr, destport) - - def __negotiatehttp(self, destaddr, destport): - """__negotiatehttp(self,destaddr,destport) - Negotiates a connection through an HTTP server. - """ - # If we need to resolve locally, we do this now - if not self.__proxy[3]: - addr = socket.gethostbyname(destaddr) - else: - addr = destaddr - headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] - headers += ["Host: ", destaddr, "\r\n"] - if (self.__proxy[4] != None and self.__proxy[5] != None): - headers += [self.__getauthheader(), "\r\n"] - headers.append("\r\n") - self.sendall("".join(headers).encode()) - # We read the response until we get the string "\r\n\r\n" - resp = self.recv(1) - while resp.find("\r\n\r\n".encode()) == -1: - resp = resp + self.recv(1) - # We just need the first line to check if the connection - # was successful - statusline = resp.splitlines()[0].split(" ".encode(), 2) - if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - try: - statuscode = int(statusline[1]) - except ValueError: - self.close() - raise GeneralProxyError((1, _generalerrors[1])) - if statuscode != 200: - self.close() - raise HTTPError((statuscode, statusline[2])) - self.__proxysockname = ("0.0.0.0", 0) - self.__proxypeername = (addr, destport) - - def connect(self, destpair): - """connect(self, despair) - Connects to the specified destination through a proxy. - destpar - A tuple of the IP/DNS address and the port number. - (identical to socket's connect). - To select the proxy server use setproxy(). 
- """ - # Do a minimal input check first - if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (type(destpair[0]) != type('')) or (type(destpair[1]) != int): - raise GeneralProxyError((5, _generalerrors[5])) - if self.__proxy[0] == PROXY_TYPE_SOCKS5: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self, (self.__proxy[1], portnum)) - self.__negotiatesocks5(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_SOCKS4: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 1080 - _orgsocket.connect(self,(self.__proxy[1], portnum)) - self.__negotiatesocks4(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self,(self.__proxy[1], portnum)) - self.__negotiatehttp(destpair[0], destpair[1]) - elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: - if self.__proxy[2] != None: - portnum = self.__proxy[2] - else: - portnum = 8080 - _orgsocket.connect(self,(self.__proxy[1],portnum)) - if destpair[1] == 443: - self.__negotiatehttp(destpair[0],destpair[1]) - else: - self.__httptunnel = False - elif self.__proxy[0] == None: - _orgsocket.connect(self, (destpair[0], destpair[1])) - else: - raise GeneralProxyError((4, _generalerrors[4])) diff --git a/oauth2/__init__.py b/oauth2/__init__.py deleted file mode 100644 index 3adbd20..0000000 --- a/oauth2/__init__.py +++ /dev/null @@ -1,735 +0,0 @@ -""" -The MIT License - -Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""" - -import urllib -import time -import random -import urlparse -import hmac -import binascii -import httplib2 - -try: - from urlparse import parse_qs, parse_qsl -except ImportError: - from cgi import parse_qs, parse_qsl - - -VERSION = '1.0' # Hi Blaine! 
-HTTP_METHOD = 'GET' -SIGNATURE_METHOD = 'PLAINTEXT' - - -class Error(RuntimeError): - """Generic exception class.""" - - def __init__(self, message='OAuth error occurred.'): - self._message = message - - @property - def message(self): - """A hack to get around the deprecation errors in 2.6.""" - return self._message - - def __str__(self): - return self._message - - -class MissingSignature(Error): - pass - - -def build_authenticate_header(realm=''): - """Optional WWW-Authenticate header (401 error)""" - return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} - - -def build_xoauth_string(url, consumer, token=None): - """Build an XOAUTH string for use in SMTP/IMPA authentication.""" - request = Request.from_consumer_and_token(consumer, token, - "GET", url) - - signing_method = SignatureMethod_HMAC_SHA1() - request.sign_request(signing_method, consumer, token) - - params = [] - for k, v in sorted(request.iteritems()): - if v is not None: - params.append('%s="%s"' % (k, escape(v))) - - return "%s %s %s" % ("GET", url, ','.join(params)) - - -def escape(s): - """Escape a URL including any /.""" - return urllib.quote(s, safe='~') - - -def generate_timestamp(): - """Get seconds since epoch (UTC).""" - return int(time.time()) - - -def generate_nonce(length=8): - """Generate pseudorandom number.""" - return ''.join([str(random.randint(0, 9)) for i in range(length)]) - - -def generate_verifier(length=8): - """Generate pseudorandom number.""" - return ''.join([str(random.randint(0, 9)) for i in range(length)]) - - -class Consumer(object): - """A consumer of OAuth-protected services. - - The OAuth consumer is a "third-party" service that wants to access - protected resources from an OAuth service provider on behalf of an end - user. It's kind of the OAuth client. - - Usually a consumer must be registered with the service provider by the - developer of the consumer software. As part of that process, the service - provider gives the consumer a *key* and a *secret* with which the consumer - software can identify itself to the service. The consumer will include its - key in each request to identify itself, but will use its secret only when - signing requests, to prove that the request is from that particular - registered consumer. - - Once registered, the consumer can then use its consumer credentials to ask - the service provider for a request token, kicking off the OAuth - authorization process. - """ - - key = None - secret = None - - def __init__(self, key, secret): - self.key = key - self.secret = secret - - if self.key is None or self.secret is None: - raise ValueError("Key and secret must be set.") - - def __str__(self): - data = {'oauth_consumer_key': self.key, - 'oauth_consumer_secret': self.secret} - - return urllib.urlencode(data) - - -class Token(object): - """An OAuth credential used to request authorization or a protected - resource. - - Tokens in OAuth comprise a *key* and a *secret*. The key is included in - requests to identify the token being used, but the secret is used only in - the signature, to prove that the requester is who the server gave the - token to. - - When first negotiating the authorization, the consumer asks for a *request - token* that the live user authorizes with the service provider. The - consumer then exchanges the request token for an *access token* that can - be used to access protected resources. 
- """ - - key = None - secret = None - callback = None - callback_confirmed = None - verifier = None - - def __init__(self, key, secret): - self.key = key - self.secret = secret - - if self.key is None or self.secret is None: - raise ValueError("Key and secret must be set.") - - def set_callback(self, callback): - self.callback = callback - self.callback_confirmed = 'true' - - def set_verifier(self, verifier=None): - if verifier is not None: - self.verifier = verifier - else: - self.verifier = generate_verifier() - - def get_callback_url(self): - if self.callback and self.verifier: - # Append the oauth_verifier. - parts = urlparse.urlparse(self.callback) - scheme, netloc, path, params, query, fragment = parts[:6] - if query: - query = '%s&oauth_verifier=%s' % (query, self.verifier) - else: - query = 'oauth_verifier=%s' % self.verifier - return urlparse.urlunparse((scheme, netloc, path, params, - query, fragment)) - return self.callback - - def to_string(self): - """Returns this token as a plain string, suitable for storage. - - The resulting string includes the token's secret, so you should never - send or store this string where a third party can read it. - """ - - data = { - 'oauth_token': self.key, - 'oauth_token_secret': self.secret, - } - - if self.callback_confirmed is not None: - data['oauth_callback_confirmed'] = self.callback_confirmed - return urllib.urlencode(data) - - @staticmethod - def from_string(s): - """Deserializes a token from a string like one returned by - `to_string()`.""" - - if not len(s): - raise ValueError("Invalid parameter string.") - - params = parse_qs(s, keep_blank_values=False) - if not len(params): - raise ValueError("Invalid parameter string.") - - try: - key = params['oauth_token'][0] - except Exception: - raise ValueError("'oauth_token' not found in OAuth request.") - - try: - secret = params['oauth_token_secret'][0] - except Exception: - raise ValueError("'oauth_token_secret' not found in " - "OAuth request.") - - token = Token(key, secret) - try: - token.callback_confirmed = params['oauth_callback_confirmed'][0] - except KeyError: - pass # 1.0, no callback confirmed. - return token - - def __str__(self): - return self.to_string() - - -def setter(attr): - name = attr.__name__ - - def getter(self): - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) - - def deleter(self): - del self.__dict__[name] - - return property(getter, attr, deleter) - - -class Request(dict): - - """The parameters and information for an HTTP request, suitable for - authorizing with OAuth credentials. - - When a consumer wants to access a service's protected resources, it does - so using a signed HTTP request identifying itself (the consumer) with its - key, and providing an access token authorized by the end user to access - those resources. - - """ - - version = VERSION - - def __init__(self, method=HTTP_METHOD, url=None, parameters=None): - self.method = method - self.url = url - if parameters is not None: - self.update(parameters) - - @setter - def url(self, value): - self.__dict__['url'] = value - if value is not None: - scheme, netloc, path, params, query, fragment = urlparse.urlparse(value) - - # Exclude default port numbers. - if scheme == 'http' and netloc[-3:] == ':80': - netloc = netloc[:-3] - elif scheme == 'https' and netloc[-4:] == ':443': - netloc = netloc[:-4] - if scheme not in ('http', 'https'): - raise ValueError("Unsupported URL %s (%s)." % (value, scheme)) - - # Normalized URL excludes params, query, and fragment. 
- self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None)) - else: - self.normalized_url = None - self.__dict__['url'] = None - - @setter - def method(self, value): - self.__dict__['method'] = value.upper() - - def _get_timestamp_nonce(self): - return self['oauth_timestamp'], self['oauth_nonce'] - - def get_nonoauth_parameters(self): - """Get any non-OAuth parameters.""" - return dict([(k, v) for k, v in self.iteritems() - if not k.startswith('oauth_')]) - - def to_header(self, realm=''): - """Serialize as a header for an HTTPAuth request.""" - oauth_params = ((k, v) for k, v in self.items() - if k.startswith('oauth_')) - stringy_params = ((k, escape(str(v))) for k, v in oauth_params) - header_params = ('%s="%s"' % (k, v) for k, v in stringy_params) - params_header = ', '.join(header_params) - - auth_header = 'OAuth realm="%s"' % realm - if params_header: - auth_header = "%s, %s" % (auth_header, params_header) - - return {'Authorization': auth_header} - - def to_postdata(self): - """Serialize as post data for a POST request.""" - # tell urlencode to deal with sequence values and map them correctly - # to resulting querystring. for example self["k"] = ["v1", "v2"] will - # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D - return urllib.urlencode(self, True) - - def to_url(self): - """Serialize as a URL for a GET request.""" - base_url = urlparse.urlparse(self.url) - query = parse_qs(base_url.query) - for k, v in self.items(): - query.setdefault(k, []).append(v) - url = (base_url.scheme, base_url.netloc, base_url.path, base_url.params, - urllib.urlencode(query, True), base_url.fragment) - return urlparse.urlunparse(url) - - def get_parameter(self, parameter): - ret = self.get(parameter) - if ret is None: - raise Error('Parameter not found: %s' % parameter) - - return ret - - def get_normalized_parameters(self): - """Return a string that contains the parameters that must be signed.""" - items = [] - for key, value in self.iteritems(): - if key == 'oauth_signature': - continue - # 1.0a/9.1.1 states that kvp must be sorted by key, then by value, - # so we unpack sequence values into multiple items for sorting. 
- if hasattr(value, '__iter__'): - items.extend((key, item) for item in value) - else: - items.append((key, value)) - - # Include any query string parameters from the provided URL - query = urlparse.urlparse(self.url)[4] - items.extend(self._split_url_string(query).items()) - - encoded_str = urllib.urlencode(sorted(items)) - # Encode signature parameters per Oauth Core 1.0 protocol - # spec draft 7, section 3.6 - # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6) - # Spaces must be encoded with "%20" instead of "+" - return encoded_str.replace('+', '%20') - - def sign_request(self, signature_method, consumer, token): - """Set the signature parameter to the result of sign.""" - - if 'oauth_consumer_key' not in self: - self['oauth_consumer_key'] = consumer.key - - if token and 'oauth_token' not in self: - self['oauth_token'] = token.key - - self['oauth_signature_method'] = signature_method.name - self['oauth_signature'] = signature_method.sign(self, consumer, token) - - @classmethod - def make_timestamp(cls): - """Get seconds since epoch (UTC).""" - return str(int(time.time())) - - @classmethod - def make_nonce(cls): - """Generate pseudorandom number.""" - return str(random.randint(0, 100000000)) - - @classmethod - def from_request(cls, http_method, http_url, headers=None, parameters=None, - query_string=None): - """Combines multiple parameter sources.""" - if parameters is None: - parameters = {} - - # Headers - if headers and 'Authorization' in headers: - auth_header = headers['Authorization'] - # Check that the authorization header is OAuth. - if auth_header[:6] == 'OAuth ': - auth_header = auth_header[6:] - try: - # Get the parameters from the header. - header_params = cls._split_header(auth_header) - parameters.update(header_params) - except: - raise Error('Unable to parse OAuth parameters from ' - 'Authorization header.') - - # GET or POST query string. - if query_string: - query_params = cls._split_url_string(query_string) - parameters.update(query_params) - - # URL parameters. - param_str = urlparse.urlparse(http_url)[4] # query - url_params = cls._split_url_string(param_str) - parameters.update(url_params) - - if parameters: - return cls(http_method, http_url, parameters) - - return None - - @classmethod - def from_consumer_and_token(cls, consumer, token=None, - http_method=HTTP_METHOD, http_url=None, parameters=None): - if not parameters: - parameters = {} - - defaults = { - 'oauth_consumer_key': consumer.key, - 'oauth_timestamp': cls.make_timestamp(), - 'oauth_nonce': cls.make_nonce(), - 'oauth_version': cls.version, - } - - defaults.update(parameters) - parameters = defaults - - if token: - parameters['oauth_token'] = token.key - if token.verifier: - parameters['oauth_verifier'] = token.verifier - - return Request(http_method, http_url, parameters) - - @classmethod - def from_token_and_callback(cls, token, callback=None, - http_method=HTTP_METHOD, http_url=None, parameters=None): - - if not parameters: - parameters = {} - - parameters['oauth_token'] = token.key - - if callback: - parameters['oauth_callback'] = callback - - return cls(http_method, http_url, parameters) - - @staticmethod - def _split_header(header): - """Turn Authorization: header into parameters.""" - params = {} - parts = header.split(',') - for param in parts: - # Ignore realm parameter. - if param.find('realm') > -1: - continue - # Remove whitespace. - param = param.strip() - # Split key-value. - param_parts = param.split('=', 1) - # Remove quotes and unescape the value. 
- params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"')) - return params - - @staticmethod - def _split_url_string(param_str): - """Turn URL string into parameters.""" - parameters = parse_qs(param_str, keep_blank_values=False) - for k, v in parameters.iteritems(): - parameters[k] = urllib.unquote(v[0]) - return parameters - - -class Client(httplib2.Http): - """OAuthClient is a worker to attempt to execute a request.""" - - def __init__(self, consumer, token=None, cache=None, timeout=None, - proxy_info=None): - - if consumer is not None and not isinstance(consumer, Consumer): - raise ValueError("Invalid consumer.") - - if token is not None and not isinstance(token, Token): - raise ValueError("Invalid token.") - - self.consumer = consumer - self.token = token - self.method = SignatureMethod_HMAC_SHA1() - - httplib2.Http.__init__(self, cache=cache, timeout=timeout, - proxy_info=proxy_info) - - def set_signature_method(self, method): - if not isinstance(method, SignatureMethod): - raise ValueError("Invalid signature method.") - - self.method = method - - def request(self, uri, method="GET", body=None, headers=None, - redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): - DEFAULT_CONTENT_TYPE = 'application/x-www-form-urlencoded' - - if not isinstance(headers, dict): - headers = {} - - is_multipart = method == 'POST' and headers.get('Content-Type', - DEFAULT_CONTENT_TYPE) != DEFAULT_CONTENT_TYPE - - if body and method == "POST" and not is_multipart: - parameters = dict(parse_qsl(body)) - else: - parameters = None - - req = Request.from_consumer_and_token(self.consumer, - token=self.token, http_method=method, http_url=uri, - parameters=parameters) - - req.sign_request(self.method, self.consumer, self.token) - - if method == "POST": - headers['Content-Type'] = headers.get('Content-Type', - DEFAULT_CONTENT_TYPE) - if is_multipart: - headers.update(req.to_header()) - else: - body = req.to_postdata() - elif method == "GET": - uri = req.to_url() - else: - headers.update(req.to_header()) - - return httplib2.Http.request(self, uri, method=method, body=body, - headers=headers, redirections=redirections, - connection_type=connection_type) - - -class Server(object): - """A skeletal implementation of a service provider, providing protected - resources to requests from authorized consumers. - - This class implements the logic to check requests for authorization. You - can use it with your web server or web framework to protect certain - resources with OAuth. - """ - - timestamp_threshold = 300 # In seconds, five minutes. 
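
The Consumer, Token, and Client docstrings above describe the OAuth roles at length; as a quick orientation, a signed request with the removed oauth2 module looked roughly like the sketch below. Client subclasses httplib2.Http and signs with HMAC-SHA1 by default, per the __init__ shown earlier. The key, secret, and URL are placeholders, not values from this repository.

    # Rough sketch of a signed request with the removed oauth2 module.
    import oauth2 as oauth

    # Credentials issued by the service provider (placeholders).
    consumer = oauth.Consumer(key='your-consumer-key', secret='your-consumer-secret')
    token = oauth.Token(key='user-access-token', secret='user-access-token-secret')

    # request() keeps the httplib2.Http signature; the request is signed
    # with HMAC-SHA1 before it is sent.
    client = oauth.Client(consumer, token)
    resp, content = client.request('https://api.example.com/resource', 'GET')
    print resp.status, content
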
- version = VERSION - signature_methods = None - - def __init__(self, signature_methods=None): - self.signature_methods = signature_methods or {} - - def add_signature_method(self, signature_method): - self.signature_methods[signature_method.name] = signature_method - return self.signature_methods - - def verify_request(self, request, consumer, token): - """Verifies an api call and checks all the parameters.""" - - version = self._get_version(request) - self._check_signature(request, consumer, token) - parameters = request.get_nonoauth_parameters() - return parameters - - def build_authenticate_header(self, realm=''): - """Optional support for the authenticate header.""" - return {'WWW-Authenticate': 'OAuth realm="%s"' % realm} - - def _get_version(self, request): - """Verify the correct version request for this server.""" - try: - version = request.get_parameter('oauth_version') - except: - version = VERSION - - if version and version != self.version: - raise Error('OAuth version %s not supported.' % str(version)) - - return version - - def _get_signature_method(self, request): - """Figure out the signature with some defaults.""" - try: - signature_method = request.get_parameter('oauth_signature_method') - except: - signature_method = SIGNATURE_METHOD - - try: - # Get the signature method object. - signature_method = self.signature_methods[signature_method] - except: - signature_method_names = ', '.join(self.signature_methods.keys()) - raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)) - - return signature_method - - def _get_verifier(self, request): - return request.get_parameter('oauth_verifier') - - def _check_signature(self, request, consumer, token): - timestamp, nonce = request._get_timestamp_nonce() - self._check_timestamp(timestamp) - signature_method = self._get_signature_method(request) - - try: - signature = request.get_parameter('oauth_signature') - except: - raise MissingSignature('Missing oauth_signature.') - - # Validate the signature. - valid = signature_method.check(request, consumer, token, signature) - - if not valid: - key, base = signature_method.signing_base(request, consumer, token) - - raise Error('Invalid signature. Expected signature base ' - 'string: %s' % base) - - built = signature_method.sign(request, consumer, token) - - def _check_timestamp(self, timestamp): - """Verify that timestamp is recentish.""" - timestamp = int(timestamp) - now = int(time.time()) - lapsed = now - timestamp - if lapsed > self.timestamp_threshold: - raise Error('Expired timestamp: given %d and now %s has a ' - 'greater difference than threshold %d' % (timestamp, now, - self.timestamp_threshold)) - - -class SignatureMethod(object): - """A way of signing requests. - - The OAuth protocol lets consumers and service providers pick a way to sign - requests. This interface shows the methods expected by the other `oauth` - modules for signing requests. Subclass it and implement its methods to - provide a new way to sign requests. - """ - - def signing_base(self, request, consumer, token): - """Calculates the string that needs to be signed. - - This method returns a 2-tuple containing the starting key for the - signing and the message to be signed. The latter may be used in error - messages to help clients debug their software. - - """ - raise NotImplementedError - - def sign(self, request, consumer, token): - """Returns the signature for the given request, based on the consumer - and token also provided. 
- - You should use your implementation of `signing_base()` to build the - message to sign. Otherwise it may be less useful for debugging. - - """ - raise NotImplementedError - - def check(self, request, consumer, token, signature): - """Returns whether the given signature is the correct signature for - the given consumer and token signing the given request.""" - built = self.sign(request, consumer, token) - return built == signature - - -class SignatureMethod_HMAC_SHA1(SignatureMethod): - name = 'HMAC-SHA1' - - def signing_base(self, request, consumer, token): - if request.normalized_url is None: - raise ValueError("Base URL for request is not set.") - - sig = ( - escape(request.method), - escape(request.normalized_url), - escape(request.get_normalized_parameters()), - ) - - key = '%s&' % escape(consumer.secret) - if token: - key += escape(token.secret) - raw = '&'.join(sig) - return key, raw - - def sign(self, request, consumer, token): - """Builds the base signature string.""" - key, raw = self.signing_base(request, consumer, token) - - # HMAC object. - try: - from hashlib import sha1 as sha - except ImportError: - import sha # Deprecated - - hashed = hmac.new(key, raw, sha) - - # Calculate the digest base 64. - return binascii.b2a_base64(hashed.digest())[:-1] - - -class SignatureMethod_PLAINTEXT(SignatureMethod): - - name = 'PLAINTEXT' - - def signing_base(self, request, consumer, token): - """Concatenates the consumer key and secret with the token's - secret.""" - sig = '%s&' % escape(consumer.secret) - if token: - sig = sig + escape(token.secret) - return sig, sig - - def sign(self, request, consumer, token): - key, raw = self.signing_base(request, consumer, token) - return raw diff --git a/oauth2/clients/__init__.py b/oauth2/clients/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/oauth2/clients/imap.py b/oauth2/clients/imap.py deleted file mode 100644 index 68b7cd8..0000000 --- a/oauth2/clients/imap.py +++ /dev/null @@ -1,40 +0,0 @@ -""" -The MIT License - -Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
-""" - -import oauth2 -import imaplib - - -class IMAP4_SSL(imaplib.IMAP4_SSL): - """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH.""" - - def authenticate(self, url, consumer, token): - if consumer is not None and not isinstance(consumer, oauth2.Consumer): - raise ValueError("Invalid consumer.") - - if token is not None and not isinstance(token, oauth2.Token): - raise ValueError("Invalid token.") - - imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', - lambda x: oauth2.build_xoauth_string(url, consumer, token)) diff --git a/oauth2/clients/smtp.py b/oauth2/clients/smtp.py deleted file mode 100644 index 3e7bf0b..0000000 --- a/oauth2/clients/smtp.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -The MIT License - -Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. -""" - -import oauth2 -import smtplib -import base64 - - -class SMTP(smtplib.SMTP): - """SMTP wrapper for smtplib.SMTP that implements XOAUTH.""" - - def authenticate(self, url, consumer, token): - if consumer is not None and not isinstance(consumer, oauth2.Consumer): - raise ValueError("Invalid consumer.") - - if token is not None and not isinstance(token, oauth2.Token): - raise ValueError("Invalid token.") - - self.docmd('AUTH', 'XOAUTH %s' % \ - base64.b64encode(oauth2.build_xoauth_string(url, consumer, token))) diff --git a/samples/appengine/app.yaml b/samples/appengine/app.yaml index 03bdf81..bdc4be1 100644 --- a/samples/appengine/app.yaml +++ b/samples/appengine/app.yaml @@ -1,15 +1,11 @@ -application: m-buzz -version: 1 +application: jcg-testing-01 +version: 2 runtime: python api_version: 1 handlers: -- url: /static - static_dir: static - -- url: /google8f1adb368b7bd14c.html - upload: google8f1adb368b7bd14c.html - static_files: static/google8f1adb368b7bd14c.html +- url: /oauth2callback + script: oauth2client/appengine.py - url: .* script: main.py diff --git a/samples/appengine_with_decorator2/client_secrets.json b/samples/appengine/client_secrets.json similarity index 100% rename from samples/appengine_with_decorator2/client_secrets.json rename to samples/appengine/client_secrets.json diff --git a/samples/appengine_with_decorator2/grant.html b/samples/appengine/grant.html similarity index 83% rename from samples/appengine_with_decorator2/grant.html rename to samples/appengine/grant.html index a52ea08..0087325 100644 --- a/samples/appengine_with_decorator2/grant.html +++ b/samples/appengine/grant.html @@ -4,7 +4,7 @@ {% if has_credentials %} -

Thanks for granting us permission. Please proceed to the main +

Thanks for granting us permission. Please proceed to the main application.

{% else %}

Grant this application permission to read your diff --git a/samples/appengine/main.py b/samples/appengine/main.py old mode 100755 new mode 100644 index 184f5fa..309376c --- a/samples/appengine/main.py +++ b/samples/appengine/main.py @@ -14,6 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. # +"""Starting template for Google App Engine applications. + +Use this project as a starting point if you are just beginning to build a Google +App Engine project. Remember to download the OAuth 2.0 client secrets which can +be obtained from the Developer Console +and save them as 'client_secrets.json' in the project directory. +""" __author__ = 'jcgregorio@google.com (Joe Gregorio)' @@ -24,84 +31,79 @@ import os import pickle from apiclient.discovery import build -from oauth2client.appengine import CredentialsProperty -from oauth2client.appengine import StorageByKeyName -from oauth2client.client import OAuth2WebServerFlow +from oauth2client.appengine import oauth2decorator_from_clientsecrets +from oauth2client.client import AccessTokenRefreshError from google.appengine.api import memcache -from google.appengine.api import users -from google.appengine.ext import db from google.appengine.ext import webapp from google.appengine.ext.webapp import template -from google.appengine.ext.webapp import util -from google.appengine.ext.webapp.util import login_required +from google.appengine.ext.webapp.util import run_wsgi_app -FLOW = OAuth2WebServerFlow( - # Visit https://code.google.com/apis/console to - # generate your client_id, client_secret and to - # register your redirect_uri. - client_id='', - client_secret='', - scope='https://www.googleapis.com/auth/buzz', - user_agent='buzz-cmdline-sample/1.0') +# CLIENT_SECRETS, name of a file containing the OAuth 2.0 information for this +# application, including client_id and client_secret, which are found +# on the API Access tab on the Google APIs +# Console +CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json') + +# Helpful message to display in the browser if the CLIENT_SECRETS file +# is missing. +MISSING_CLIENT_SECRETS_MESSAGE = """ +

Warning: Please configure OAuth 2.0

+

+To make this sample run you will need to populate the client_secrets.json file +found at: +

+

+%s. +

+

with information found on the APIs Console. +

+""" % CLIENT_SECRETS -class Credentials(db.Model): - credentials = CredentialsProperty() - +http = httplib2.Http(memcache) +service = build("plus", "v1", http=http) +decorator = oauth2decorator_from_clientsecrets( + CLIENT_SECRETS, + 'https://www.googleapis.com/auth/plus.me', + MISSING_CLIENT_SECRETS_MESSAGE) class MainHandler(webapp.RequestHandler): - @login_required + @decorator.oauth_aware def get(self): - user = users.get_current_user() - credentials = StorageByKeyName( - Credentials, user.user_id(), 'credentials').get() + path = os.path.join(os.path.dirname(__file__), 'grant.html') + variables = { + 'url': decorator.authorize_url(), + 'has_credentials': decorator.has_credentials() + } + self.response.out.write(template.render(path, variables)) + + +class AboutHandler(webapp.RequestHandler): + + @decorator.oauth_required + def get(self): + try: + http = decorator.http() + user = service.people().get(userId='me').execute(http) + text = 'Hello, %s!' % user['displayName'] - if credentials is None or credentials.invalid == True: - callback = self.request.relative_url('/oauth2callback') - authorize_url = FLOW.step1_get_authorize_url(callback) - memcache.set(user.user_id(), pickle.dumps(FLOW)) - self.redirect(authorize_url) - else: - http = httplib2.Http() - http = credentials.authorize(http) - service = build("buzz", "v1", http=http) - activities = service.activities() - activitylist = activities.list(scope='@consumption', - userId='@me').execute() path = os.path.join(os.path.dirname(__file__), 'welcome.html') - logout = users.create_logout_url('/') - self.response.out.write( - template.render( - path, {'activitylist': activitylist, - 'logout': logout - })) - - -class OAuthHandler(webapp.RequestHandler): - - @login_required - def get(self): - user = users.get_current_user() - flow = pickle.loads(memcache.get(user.user_id())) - if flow: - credentials = flow.step2_exchange(self.request.params) - StorageByKeyName( - Credentials, user.user_id(), 'credentials').put(credentials) - self.redirect("/") - else: - pass + self.response.out.write(template.render(path, {'text': text })) + except AccessTokenRefreshError: + self.redirect('/') def main(): application = webapp.WSGIApplication( [ - ('/', MainHandler), - ('/oauth2callback', OAuthHandler) + ('/', MainHandler), + ('/about', AboutHandler), ], debug=True) - util.run_wsgi_app(application) + run_wsgi_app(application) if __name__ == '__main__': diff --git a/samples/appengine/simplejson b/samples/appengine/simplejson deleted file mode 120000 index 148c7cf..0000000 --- a/samples/appengine/simplejson +++ /dev/null @@ -1 +0,0 @@ -../../simplejson/ \ No newline at end of file diff --git a/samples/appengine/static/go.png b/samples/appengine/static/go.png deleted file mode 100644 index e5aacda8c654cc9c3349639d6975741c99a368c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1139 zcmV-(1dRKMP)Px#24YJ`L;(K){{a7>y{D4^000SaNLh0L01FcU01FcV0GgZ_00007bV*G`2ige< z4j2fDH-&Qm000?uMObu0Z*6U5Zgc=ca%Ew3Wn>_CX>@2HM@dakSAh-}000BdNkl#C^jk*3sDS21QXOkMa4jn&22wE?$zCEyk<6g>N&gTedf%WGrNo?l}e?M z#l|4Zjg>OU@0#87Z(^C8$(r9724a|!Sg)c-rkf0AU{7}(l}8Rv9q%S zj^m)$>oG7efW5sv{M7%Mv;nxgyMxc?gUx0`PEL*_pO=>xEH5vkrKJT%qY)Dm6NpBm zbOO+7wQx8b2m}JSy}iZa;v%e8D>NF7B>$_cD@;yKqPn^oR;v|DOG|iuex|bq6c-m` zXlMwbPzcx8*YJA1Fq_Sid{e0u_V@QOI5-HsUJs7ru)VzvQ55Mcf)y1N@OV53hr>8K zJHz<+I1C1ZB;Q0L0iNg4(b0j*%1Vrmj^h0MoZeR0WHMoDY6{V46bA2#8O zW3d>#UN1~06B-&CFh4&}_gSE^u@Q4~bBM)a2nK`TI8O3Ta&&Zr;o)Ir9EMnyMORlB zR#sO2Y9~?DY4E>0NhA_Mb93|ePe3LCNG6jA27?d;0h^nfNF)-<2Ox?fc6WCn2m<_m 
zKc1eR{_PU{DdES*#}EVoYinzW$K$em>+9=52%!(a<>e&=L4Y6#h(sc?d`nA9!Eqc2 zAuyRt^khIh9>?nHDtMm9$;pW<-@Lp$^z`(=<#M5|tqlyr(6Iy_A0M&4z7C(yhfpXa zyNzvXY62kyeSLi>EG(p>1|$**1OfpFf`F~9E!m&L3_S`I|w1NPL3G~K&p6#VbI#z3PK2adwZePYJav9{Z}ulsi}d><$}}cM0t66 z#x!5w3ZST{2o8q>gb-LP7G$EO9RSO+= - Buzz Stuff - + Welcome -

Logout

- - {% for item in activitylist.items %} - - - - - - {% endfor %} -
-
- {{ item.actor.name }}
- {{ item.object.content }} - - -
+

{{ text }}

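
For reference, the pieces that the updated samples/appengine sample above relies on fit together roughly as follows. This is a condensed sketch based on the new main.py in this patch, not code from the patch itself: it keeps the 'plus' v1 service, scope, and /about handler shown in the diff, but omits the oauth_aware grant page, the missing-secrets message, and the template rendering.

    # Condensed sketch of the decorator-based flow added to samples/appengine
    # above (illustrative only; see the full main.py diff for the real sample).
    import os
    import httplib2

    from apiclient.discovery import build
    from oauth2client.appengine import oauth2decorator_from_clientsecrets
    from oauth2client.client import AccessTokenRefreshError
    from google.appengine.api import memcache
    from google.appengine.ext import webapp
    from google.appengine.ext.webapp.util import run_wsgi_app

    CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json')

    # One discovery-built service shared by all handlers; memcache backs the
    # httplib2 cache, as in the sample.
    service = build('plus', 'v1', http=httplib2.Http(memcache))

    # The decorator handles the /oauth2callback round trip declared in app.yaml.
    # (The optional missing-secrets message argument is omitted here.)
    decorator = oauth2decorator_from_clientsecrets(
        CLIENT_SECRETS, 'https://www.googleapis.com/auth/plus.me')


    class AboutHandler(webapp.RequestHandler):

      @decorator.oauth_required   # sends the user through the grant flow if needed
      def get(self):
        try:
          user = service.people().get(userId='me').execute(decorator.http())
          self.response.out.write('Hello, %s!' % user['displayName'])
        except AccessTokenRefreshError:
          # Stored credentials went stale; start over at the grant page.
          self.redirect('/')


    def main():
      run_wsgi_app(webapp.WSGIApplication([('/about', AboutHandler)], debug=True))


    if __name__ == '__main__':
      main()
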
diff --git a/samples/appengine_with_decorator/apiclient b/samples/appengine_with_decorator/apiclient deleted file mode 120000 index 24fe0bc..0000000 --- a/samples/appengine_with_decorator/apiclient +++ /dev/null @@ -1 +0,0 @@ -../appengine/apiclient \ No newline at end of file diff --git a/samples/appengine_with_decorator/app.yaml b/samples/appengine_with_decorator/app.yaml deleted file mode 100644 index c0d43d5..0000000 --- a/samples/appengine_with_decorator/app.yaml +++ /dev/null @@ -1,12 +0,0 @@ -application: jcg-testing-01 -version: 1 -runtime: python -api_version: 1 - -handlers: -- url: /oauth2callback - script: oauth2client/appengine.py - -- url: .* - script: main.py - diff --git a/samples/appengine_with_decorator/better.py b/samples/appengine_with_decorator/better.py deleted file mode 100644 index 03963f4..0000000 --- a/samples/appengine_with_decorator/better.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2007 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Starting template for Google App Engine applications. - -Use this project as a starting point if you are just beginning to build a Google -App Engine project. Remember to fill in the OAuth 2.0 client_id and -client_secret which can be obtained from the Developer Console - -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - - -import httplib2 -import logging -import os -import pickle - -from apiclient.discovery import build -from oauth2client.appengine import OAuth2Decorator -from oauth2client.client import AccessTokenRefreshError -from google.appengine.api import memcache -from google.appengine.ext import webapp -from google.appengine.ext.webapp import template -from google.appengine.ext.webapp.util import run_wsgi_app - -# The client_id and client_secret are copied from the API Access tab on -# the Google APIs Console -decorator = OAuth2Decorator( - client_id='837647042410-75ifgipj95q4agpm0cs452mg7i2pn17c.apps.googleusercontent.com', - client_secret='QhxYsjM__u4vy5N0DXUFRwwI', - scope='https://www.googleapis.com/auth/buzz', - user_agent='my-sample-app/1.0') - -http = httplib2.Http(memcache) -service = build("buzz", "v1", http=http) - - -class MainHandler(webapp.RequestHandler): - - @decorator.oauth_aware - def get(self): - path = os.path.join(os.path.dirname(__file__), 'grant.html') - variables = { - 'url': decorator.authorize_url(), - 'has_credentials': decorator.has_credentials() - } - self.response.out.write(template.render(path, variables)) - - -class FollowerHandler(webapp.RequestHandler): - - @decorator.oauth_required - def get(self): - try: - http = decorator.http() - followers = service.people().list( - userId='@me', groupId='@followers').execute(http) - text = 'Hello, you have %s followers!' 
% followers['totalResults'] - - path = os.path.join(os.path.dirname(__file__), 'welcome.html') - self.response.out.write(template.render(path, {'text': text })) - except AccessTokenRefreshError: - self.redirect('/') - - -def main(): - application = webapp.WSGIApplication( - [ - ('/', MainHandler), - ('/followers', FollowerHandler), - ], - debug=True) - run_wsgi_app(application) - - -if __name__ == '__main__': - main() diff --git a/samples/appengine_with_decorator/gflags.py b/samples/appengine_with_decorator/gflags.py deleted file mode 120000 index 5a2ff94..0000000 --- a/samples/appengine_with_decorator/gflags.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags.py \ No newline at end of file diff --git a/samples/appengine_with_decorator/gflags_validators.py b/samples/appengine_with_decorator/gflags_validators.py deleted file mode 120000 index 25d8ce8..0000000 --- a/samples/appengine_with_decorator/gflags_validators.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags_validators.py \ No newline at end of file diff --git a/samples/appengine_with_decorator/httplib2 b/samples/appengine_with_decorator/httplib2 deleted file mode 120000 index 4cd2774..0000000 --- a/samples/appengine_with_decorator/httplib2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/httplib2 \ No newline at end of file diff --git a/samples/appengine_with_decorator/main.py b/samples/appengine_with_decorator/main.py deleted file mode 100755 index ca08d29..0000000 --- a/samples/appengine_with_decorator/main.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2007 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Starting template for Google App Engine applications. - -Use this project as a starting point if you are just beginning to build a Google -App Engine project. Remember to fill in the OAuth 2.0 client_id and -client_secret which can be obtained from the Developer Console - -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - - -import httplib2 -import logging -import os -import pickle - -from apiclient.discovery import build -from oauth2client.appengine import OAuth2Decorator -from google.appengine.api import memcache -from google.appengine.ext import webapp -from google.appengine.ext.webapp.util import run_wsgi_app - -# The client_id and client_secret are copied from the API Access tab on -# the Google APIs Console -decorator = OAuth2Decorator( - client_id='837647042410-75ifgipj95q4agpm0cs452mg7i2pn17c.apps.googleusercontent.com', - client_secret='QhxYsjM__u4vy5N0DXUFRwwI', - scope='https://www.googleapis.com/auth/buzz', - user_agent='my-sample-app/1.0') - -http = httplib2.Http(memcache) -service = build("buzz", "v1", http=http) - -class MainHandler(webapp.RequestHandler): - - @decorator.oauth_required - def get(self): - http = decorator.http() - followers = service.people().list( - userId='@me', groupId='@followers').execute(http) - self.response.out.write( - 'Hello, you have %s followers!' 
% followers['totalResults']) - -def main(): - application = webapp.WSGIApplication( - [ - ('/', MainHandler), - ], - debug=True) - run_wsgi_app(application) - - -if __name__ == '__main__': - main() diff --git a/samples/appengine_with_decorator/oauth2 b/samples/appengine_with_decorator/oauth2 deleted file mode 120000 index ee61c25..0000000 --- a/samples/appengine_with_decorator/oauth2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/oauth2 \ No newline at end of file diff --git a/samples/appengine_with_decorator/oauth2client b/samples/appengine_with_decorator/oauth2client deleted file mode 120000 index 9013119..0000000 --- a/samples/appengine_with_decorator/oauth2client +++ /dev/null @@ -1 +0,0 @@ -../../oauth2client/ \ No newline at end of file diff --git a/samples/appengine_with_decorator/uritemplate b/samples/appengine_with_decorator/uritemplate deleted file mode 120000 index 1c98e41..0000000 --- a/samples/appengine_with_decorator/uritemplate +++ /dev/null @@ -1 +0,0 @@ -../appengine/uritemplate \ No newline at end of file diff --git a/samples/appengine_with_decorator2/apiclient b/samples/appengine_with_decorator2/apiclient deleted file mode 120000 index 24fe0bc..0000000 --- a/samples/appengine_with_decorator2/apiclient +++ /dev/null @@ -1 +0,0 @@ -../appengine/apiclient \ No newline at end of file diff --git a/samples/appengine_with_decorator2/app.yaml b/samples/appengine_with_decorator2/app.yaml deleted file mode 100644 index bdc4be1..0000000 --- a/samples/appengine_with_decorator2/app.yaml +++ /dev/null @@ -1,12 +0,0 @@ -application: jcg-testing-01 -version: 2 -runtime: python -api_version: 1 - -handlers: -- url: /oauth2callback - script: oauth2client/appengine.py - -- url: .* - script: main.py - diff --git a/samples/appengine_with_decorator2/gflags.py b/samples/appengine_with_decorator2/gflags.py deleted file mode 120000 index 5a2ff94..0000000 --- a/samples/appengine_with_decorator2/gflags.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags.py \ No newline at end of file diff --git a/samples/appengine_with_decorator2/gflags_validators.py b/samples/appengine_with_decorator2/gflags_validators.py deleted file mode 120000 index 25d8ce8..0000000 --- a/samples/appengine_with_decorator2/gflags_validators.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags_validators.py \ No newline at end of file diff --git a/samples/appengine_with_decorator2/httplib2 b/samples/appengine_with_decorator2/httplib2 deleted file mode 120000 index 4cd2774..0000000 --- a/samples/appengine_with_decorator2/httplib2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/httplib2 \ No newline at end of file diff --git a/samples/appengine_with_decorator2/index.yaml b/samples/appengine_with_decorator2/index.yaml deleted file mode 100644 index a3b9e05..0000000 --- a/samples/appengine_with_decorator2/index.yaml +++ /dev/null @@ -1,11 +0,0 @@ -indexes: - -# AUTOGENERATED - -# This index.yaml is automatically updated whenever the dev_appserver -# detects that a new type of query is run. If you want to manage the -# index.yaml file manually, remove the above marker line (the line -# saying "# AUTOGENERATED"). If you want to manage some indexes -# manually, move them above the marker line. The index.yaml file is -# automatically uploaded to the admin console when you next deploy -# your application using appcfg.py. 
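
The appengine_with_decorator variants removed here differ from the surviving samples/appengine sample mainly in how the decorator is constructed: they embed the client_id and client_secret directly in source instead of loading client_secrets.json. A minimal side-by-side sketch of the two constructions (placeholder credentials and file path; the scope follows the surviving sample rather than the removed Buzz one):

    # Both decorators wrap handlers the same way (@decorator.oauth_required /
    # @decorator.oauth_aware); they differ only in where the OAuth 2.0 client
    # settings come from. The values below are placeholders, not real credentials.
    from oauth2client.appengine import OAuth2Decorator
    from oauth2client.appengine import oauth2decorator_from_clientsecrets

    # Style used by the deleted appengine_with_decorator samples: inline settings.
    inline_decorator = OAuth2Decorator(
        client_id='your-client-id.apps.googleusercontent.com',
        client_secret='your-client-secret',
        scope='https://www.googleapis.com/auth/plus.me',
        user_agent='my-sample-app/1.0')

    # Style kept in samples/appengine: settings loaded from client_secrets.json.
    secrets_decorator = oauth2decorator_from_clientsecrets(
        'client_secrets.json',
        'https://www.googleapis.com/auth/plus.me')

Keeping the client settings in client_secrets.json avoids checking secrets into source, which is presumably why the consolidated samples/appengine sample standardizes on the second form.
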
diff --git a/samples/appengine_with_decorator2/main.py b/samples/appengine_with_decorator2/main.py deleted file mode 100644 index 443ff64..0000000 --- a/samples/appengine_with_decorator2/main.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2007 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Starting template for Google App Engine applications. - -Use this project as a starting point if you are just beginning to build a Google -App Engine project. Remember to download the OAuth 2.0 client secrets which can -be obtained from the Developer Console -and save them as 'client_secrets.json' in the project directory. -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - - -import httplib2 -import logging -import os -import pickle - -from apiclient.discovery import build -from oauth2client.appengine import oauth2decorator_from_clientsecrets -from oauth2client.client import AccessTokenRefreshError -from google.appengine.api import memcache -from google.appengine.ext import webapp -from google.appengine.ext.webapp import template -from google.appengine.ext.webapp.util import run_wsgi_app - - -# CLIENT_SECRETS, name of a file containing the OAuth 2.0 information for this -# application, including client_id and client_secret, which are found -# on the API Access tab on the Google APIs -# Console -CLIENT_SECRETS = os.path.join(os.path.dirname(__file__), 'client_secrets.json') - -# Helpful message to display in the browser if the CLIENT_SECRETS file -# is missing. -MISSING_CLIENT_SECRETS_MESSAGE = """ -

Warning: Please configure OAuth 2.0

-

-To make this sample run you will need to populate the client_secrets.json file -found at: -

-

-%s. -

-

with information found on the APIs Console. -

-""" % CLIENT_SECRETS - - -http = httplib2.Http(memcache) -service = build("buzz", "v1", http=http) -decorator = oauth2decorator_from_clientsecrets( - CLIENT_SECRETS, - 'https://www.googleapis.com/auth/buzz', - MISSING_CLIENT_SECRETS_MESSAGE) - -class MainHandler(webapp.RequestHandler): - - @decorator.oauth_aware - def get(self): - path = os.path.join(os.path.dirname(__file__), 'grant.html') - variables = { - 'url': decorator.authorize_url(), - 'has_credentials': decorator.has_credentials() - } - self.response.out.write(template.render(path, variables)) - - -class FollowerHandler(webapp.RequestHandler): - - @decorator.oauth_required - def get(self): - try: - http = decorator.http() - followers = service.people().list( - userId='@me', groupId='@followers').execute(http) - text = 'Hello, you have %s followers!' % followers['totalResults'] - - path = os.path.join(os.path.dirname(__file__), 'welcome.html') - self.response.out.write(template.render(path, {'text': text })) - except AccessTokenRefreshError: - self.redirect('/') - - -def main(): - application = webapp.WSGIApplication( - [ - ('/', MainHandler), - ('/followers', FollowerHandler), - ], - debug=True) - run_wsgi_app(application) - - -if __name__ == '__main__': - main() diff --git a/samples/appengine_with_decorator2/oauth2 b/samples/appengine_with_decorator2/oauth2 deleted file mode 120000 index ee61c25..0000000 --- a/samples/appengine_with_decorator2/oauth2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/oauth2 \ No newline at end of file diff --git a/samples/appengine_with_decorator2/oauth2client b/samples/appengine_with_decorator2/oauth2client deleted file mode 120000 index 9013119..0000000 --- a/samples/appengine_with_decorator2/oauth2client +++ /dev/null @@ -1 +0,0 @@ -../../oauth2client/ \ No newline at end of file diff --git a/samples/appengine_with_decorator2/uritemplate b/samples/appengine_with_decorator2/uritemplate deleted file mode 120000 index 1c98e41..0000000 --- a/samples/appengine_with_decorator2/uritemplate +++ /dev/null @@ -1 +0,0 @@ -../appengine/uritemplate \ No newline at end of file diff --git a/samples/appengine_with_decorator2/welcome.html b/samples/appengine_with_decorator2/welcome.html deleted file mode 100644 index 57b186e..0000000 --- a/samples/appengine_with_decorator2/welcome.html +++ /dev/null @@ -1,8 +0,0 @@ - - - Welcome - - -

{{ text }}

- - diff --git a/samples/oauth2/dailymotion/app.yaml b/samples/dailymotion/app.yaml similarity index 100% rename from samples/oauth2/dailymotion/app.yaml rename to samples/dailymotion/app.yaml diff --git a/samples/oauth2/dailymotion/gflags.py b/samples/dailymotion/gflags.py similarity index 100% rename from samples/oauth2/dailymotion/gflags.py rename to samples/dailymotion/gflags.py diff --git a/samples/oauth2/dailymotion/gflags_validators.py b/samples/dailymotion/gflags_validators.py similarity index 100% rename from samples/oauth2/dailymotion/gflags_validators.py rename to samples/dailymotion/gflags_validators.py diff --git a/samples/oauth2/dailymotion/httplib2 b/samples/dailymotion/httplib2 similarity index 100% rename from samples/oauth2/dailymotion/httplib2 rename to samples/dailymotion/httplib2 diff --git a/samples/appengine_with_decorator/index.yaml b/samples/dailymotion/index.yaml similarity index 100% rename from samples/appengine_with_decorator/index.yaml rename to samples/dailymotion/index.yaml diff --git a/samples/oauth2/dailymotion/main.py b/samples/dailymotion/main.py similarity index 100% rename from samples/oauth2/dailymotion/main.py rename to samples/dailymotion/main.py diff --git a/samples/oauth2/dailymotion/oauth2client b/samples/dailymotion/oauth2client similarity index 100% rename from samples/oauth2/dailymotion/oauth2client rename to samples/dailymotion/oauth2client diff --git a/samples/oauth2/dailymotion/simplejson b/samples/dailymotion/simplejson similarity index 100% rename from samples/oauth2/dailymotion/simplejson rename to samples/dailymotion/simplejson diff --git a/samples/oauth2/dailymotion/uritemplate b/samples/dailymotion/uritemplate similarity index 100% rename from samples/oauth2/dailymotion/uritemplate rename to samples/dailymotion/uritemplate diff --git a/samples/oauth2/dailymotion/welcome.html b/samples/dailymotion/welcome.html similarity index 100% rename from samples/oauth2/dailymotion/welcome.html rename to samples/dailymotion/welcome.html diff --git a/samples/debugging/main.py b/samples/debugging/main.py deleted file mode 100644 index be82ff1..0000000 --- a/samples/debugging/main.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/python2.4 -# -*- coding: utf-8 -*- -# -# Copyright 2010 Google Inc. All Rights Reserved. - -"""Simple command-line example for Translate. - -Command-line application that translates -some text. -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - -import gflags -import logging -import pprint -import sys - -from apiclient.discovery import build -from apiclient.model import JsonModel - - -FLAGS = gflags.FLAGS -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def main(argv): - try: - argv = FLAGS(argv) - except gflags.FlagsError, e: - print '%s\\nUsage: %s ARGS\\n%s' % (e, argv[0], FLAGS) - sys.exit(1) - - service = build('translate', 'v2', - developerKey='AIzaSyAQIKv_gwnob-YNrXV2stnY86GSGY81Zr0', - model=JsonModel()) - print service.translations().list( - source='en', - target='fr', - q=['flower', 'car'] - ).execute() - -if __name__ == '__main__': - main(sys.argv) diff --git a/samples/local/main.py b/samples/local/main.py deleted file mode 100644 index 98fb0a4..0000000 --- a/samples/local/main.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/python2.4 -# -*- coding: utf-8 -*- -# -# Copyright 2010 Google Inc. All Rights Reserved. - -"""Simple command-line example for running against a - local server. 
- -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - -# Enable this sample to be run from the top-level directory -import os -import sys -sys.path.insert(0, os.getcwd()) - -from apiclient.discovery import build - -import httplib2 -# httplib2.debuglevel = 4 -import pickle -import pprint - -DISCOVERY_URI = ('http://localhost:3990/discovery/v0.2beta1/describe/' - '{api}/{apiVersion}') - - -def main(): - http = httplib2.Http() - - service = build("buzz", "v1", http=http, discoveryServiceUrl=DISCOVERY_URI) - help(service.activities().list) - print service.activities().list(userId='@self', scope='@me', c='foo').uri - -if __name__ == '__main__': - main() diff --git a/samples/localdiscovery/buzz.json b/samples/localdiscovery/buzz.json deleted file mode 100644 index cd121a2..0000000 --- a/samples/localdiscovery/buzz.json +++ /dev/null @@ -1,3727 +0,0 @@ -{ - "kind": "discovery#restDescription", - "id": "buzz:v1", - "name": "buzz", - "version": "v1", - "title": "Buzz API", - "description": "Lets you share updates, photos, videos, and more with your friends around the world", - "icons": { - "x16": "http://www.google.com/images/icons/product/buzz-16.png", - "x32": "http://www.google.com/images/icons/product/buzz-32.png" - }, - "documentationLink": "http://code.google.com/apis/buzz/v1/using_rest.html", - "labels": [ - "labs" - ], - "protocol": "rest", - "basePath": "/buzz/v1/", - "auth": { - "oauth2": { - "scopes": { - "https://www.googleapis.com/auth/buzz": { - "description": "Manage your Buzz activity and address book" - }, - "https://www.googleapis.com/auth/buzz.readonly": { - "description": "View your Buzz activity and address book" - }, - "https://www.googleapis.com/auth/picasa": { - "description": "Manage your photos and videos" - } - } - } - }, - "features": [ - "dataWrapper" - ], - "schemas": { - "Activity": { - "id": "Activity", - "type": "object", - "properties": { - "actor": { - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "name": { - "type": "any" - }, - "profileUrl": { - "type": "any" - }, - "thumbnailUrl": { - "type": "any" - } - } - }, - "address": { - "type": "any" - }, - "annotation": { - "type": "any" - }, - "categories": { - "type": "array", - "items": { - "type": "object", - "properties": { - "label": { - "type": "any" - }, - "schema": { - "type": "any" - }, - "term": { - "type": "any" - } - } - } - }, - "crosspostSource": { - "type": "any" - }, - "detectedlLang": { - "type": "any" - }, - "geocode": { - "type": "any" - }, - "id": { - "type": "any" - }, - "kind": { - "type": "string", - "default": "buzz#activity" - }, - "links": { - "type": "object", - "properties": { - "liked": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "href": { - "type": "any" - }, - "type": { - "type": "any" - } - } - } - } - }, - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "object": { - "type": "object", - "properties": { - "actor": { - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "name": { - "type": "any" - }, - "profileUrl": { - "type": "any" - }, - "thumbnailUrl": { - "type": "any" - } - } - }, - "attachments": { - "type": "array", - "items": { - "type": "object", - 
"properties": { - "content": { - "type": "any" - }, - "id": { - "type": "any" - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "title": { - "type": "any" - }, - "type": { - "type": "string" - } - } - } - }, - "comments": { - "type": "array", - "items": { - "$ref": "Comment" - } - }, - "content": { - "type": "any" - }, - "detectedlLang": { - "type": "any" - }, - "id": { - "type": "any" - }, - "liked": { - "type": "array", - "items": { - "$ref": "Person" - } - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "href": { - "type": "any" - }, - "type": { - "type": "any" - } - } - } - } - }, - "originalContent": { - "type": "any" - }, - "shareOriginal": { - "$ref": "Activity" - }, - "targetLang": { - "type": "any" - }, - "type": { - "type": "string" - }, - "untranslatedContent": { - "type": "any" - } - } - }, - "placeId": { - "type": "any" - }, - "placeName": { - "type": "any" - }, - "placeholder": { - "type": "any" - }, - "published": { - "type": "string" - }, - "radius": { - "type": "any" - }, - "source": { - "type": "object", - "properties": { - "title": { - "type": "any" - } - } - }, - "targetLang": { - "type": "any" - }, - "title": { - "type": "any" - }, - "untranslatedTitle": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "verbs": { - "type": "array", - "items": { - "type": "string" - } - }, - "visibility": { - "type": "object", - "properties": { - "entries": { - "type": "array", - "items": { - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "title": { - "type": "any" - } - } - } - } - } - } - } - }, - "ActivityFeed": { - "id": "ActivityFeed", - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "items": { - "type": "array", - "items": { - "$ref": "Activity" - } - }, - "kind": { - "type": "string", - "default": "buzz#activityFeed" - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "title": { - "type": "any" - }, - "updated": { - "type": "string" - } - } - }, - "Album": { - "id": "Album", - "type": "object", - "properties": { - "created": { - "type": "string" - }, - "description": { - "type": "string" - }, - "firstPhotoId": { - "type": "integer" - }, - "id": { - "type": "integer" - }, - "kind": { - "type": "string", - "default": "buzz#album" - }, - "lastModified": { - "type": "string" - }, - "links": { - "type": "object", - "properties": { - "alternate": { - "$ref": "Link" - }, - "enclosure": { - "$ref": "Link" - } - } - }, - "owner": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "profileUrl": { - "type": "string" - }, - "thumbnailUrl": { - "type": "string" - } - } - }, - "tags": { - "type": "array", - "items": { - "type": "string" - } - }, - "title": { - "type": "string" - }, - "version": { - "type": 
"integer" - } - } - }, - "AlbumLite": { - "id": "AlbumLite", - "type": "object", - "properties": { - "collection": { - "type": "object", - "properties": { - "album": { - "type": "any" - }, - "albumId": { - "type": "any" - }, - "photo": { - "type": "object", - "properties": { - "photoUrl": { - "type": "any" - } - } - } - } - }, - "kind": { - "type": "string", - "default": "buzz#albumLite" - } - } - }, - "AlbumsFeed": { - "id": "AlbumsFeed", - "type": "object", - "properties": { - "items": { - "type": "array", - "items": { - "$ref": "Album" - } - }, - "kind": { - "type": "string", - "default": "buzz#albumsFeed" - } - } - }, - "ChiliPhotosResourceJson": { - "id": "ChiliPhotosResourceJson", - "type": "object", - "properties": { - "album": { - "type": "object", - "properties": { - "id": { - "type": "integer" - }, - "page_link": { - "$ref": "Link" - } - } - }, - "created": { - "type": "string" - }, - "description": { - "type": "string" - }, - "fileSize": { - "type": "integer" - }, - "id": { - "type": "integer" - }, - "kind": { - "type": "string" - }, - "lastModified": { - "type": "string" - }, - "links": { - "type": "object", - "properties": { - "alternate": { - "type": "array", - "items": { - "$ref": "Link" - } - } - }, - "additionalProperties": { - "type": "array", - "items": { - "$ref": "Link" - } - } - }, - "owner": { - "type": "object", - "properties": { - "id": { - "type": "string" - }, - "name": { - "type": "string" - }, - "profileUrl": { - "type": "string" - }, - "thumbnailUrl": { - "type": "string" - } - } - }, - "timestamp": { - "type": "number" - }, - "title": { - "type": "string" - }, - "version": { - "type": "integer" - }, - "video": { - "$ref": "Video" - } - } - }, - "Comment": { - "id": "Comment", - "type": "object", - "properties": { - "actor": { - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "name": { - "type": "any" - }, - "profileUrl": { - "type": "any" - }, - "thumbnailUrl": { - "type": "any" - } - } - }, - "content": { - "type": "any" - }, - "detectedLang": { - "type": "any" - }, - "id": { - "type": "any" - }, - "kind": { - "type": "string", - "default": "buzz#comment" - }, - "links": { - "type": "object", - "properties": { - "inReplyTo": { - "type": "array", - "items": { - "type": "object", - "properties": { - "href": { - "type": "any" - }, - "ref": { - "type": "any" - }, - "source": { - "type": "any" - } - } - } - } - }, - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "originalContent": { - "type": "any" - }, - "placeholder": { - "type": "any" - }, - "published": { - "type": "string" - }, - "targetLang": { - "type": "any" - }, - "untranslatedContent": { - "type": "any" - }, - "updated": { - "type": "string" - } - } - }, - "CommentFeed": { - "id": "CommentFeed", - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "items": { - "type": "array", - "items": { - "$ref": "Comment" - } - }, - "kind": { - "type": "string", - "default": "buzz#commentFeed" - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - 
"type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "title": { - "type": "any" - }, - "updated": { - "type": "string" - } - } - }, - "CountFeed": { - "id": "CountFeed", - "type": "object", - "properties": { - "counts": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "timestamp": { - "type": "string" - } - } - } - } - }, - "kind": { - "type": "string", - "default": "buzz#countFeed" - } - } - }, - "Group": { - "id": "Group", - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "kind": { - "type": "string", - "default": "buzz#group" - }, - "links": { - "type": "object", - "properties": { - "self": { - "type": "array", - "items": { - "type": "object", - "properties": { - "href": { - "type": "any" - }, - "type": { - "type": "string", - "default": "application/json" - } - } - } - } - } - }, - "memberCount": { - "type": "any" - }, - "title": { - "type": "any" - } - } - }, - "GroupFeed": { - "id": "GroupFeed", - "type": "object", - "properties": { - "items": { - "type": "array", - "items": { - "$ref": "Group" - } - }, - "kind": { - "type": "string", - "default": "buzz#groupFeed" - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - } - } - }, - "Link": { - "id": "Link", - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "height": { - "type": "integer" - }, - "href": { - "type": "string" - }, - "title": { - "type": "string" - }, - "type": { - "type": "string" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "integer" - } - } - }, - "PeopleFeed": { - "id": "PeopleFeed", - "type": "object", - "properties": { - "entry": { - "type": "array", - "items": { - "$ref": "Person" - } - }, - "itemsPerPage": { - "type": "any" - }, - "kind": { - "type": "string", - "default": "buzz#peopleFeed" - }, - "startIndex": { - "type": "any" - }, - "totalResults": { - "type": "any" - } - } - }, - "Person": { - "id": "Person", - "type": "object", - "properties": { - "aboutMe": { - "type": "any" - }, - "accounts": { - "type": "array", - "items": { - "type": "object", - "properties": { - "domain": { - "type": "any" - }, - "userid": { - "type": "any" - }, - "username": { - "type": "any" - } - } - } - }, - "activities": { - "type": "array", - "items": { - "type": "any" - } - }, - "addresses": { - "type": "array", - "items": { - "type": "object", - "properties": { - "country": { - "type": "any" - }, - "formatted": { - "type": "any" - }, - "locality": { - "type": "any" - }, - "postalCode": { - "type": "any" - }, - "primary": { - "type": "any" - }, - "region": { - "type": "any" - }, - "streetAddress": { - "type": "any" - }, - "type": { - "type": "any" - } - } - } - }, - "anniversary": { - "type": "any" - }, - "birthday": { - "type": "any" - }, - "bodyType": { - "type": "any" - }, - "books": { - "type": "array", - "items": { - "type": "any" - } - }, - "cars": { - "type": "array", - "items": { - "type": "any" - } - }, - "children": { - "type": "array", - "items": { - "type": "any" - } - }, - "connected": { - "type": "any" - }, - "currentLocation": { - "type": "any" - }, - 
"displayName": { - "type": "any" - }, - "drinker": { - "type": "any" - }, - "emails": { - "type": "array", - "items": { - "type": "object", - "properties": { - "primary": { - "type": "any" - }, - "type": { - "type": "any" - }, - "value": { - "type": "any" - } - } - } - }, - "ethnicity": { - "type": "any" - }, - "fashion": { - "type": "any" - }, - "food": { - "type": "array", - "items": { - "type": "any" - } - }, - "gender": { - "type": "any" - }, - "happiestWhen": { - "type": "any" - }, - "hasApp": { - "type": "any" - }, - "heroes": { - "type": "array", - "items": { - "type": "any" - } - }, - "humor": { - "type": "any" - }, - "id": { - "type": "any" - }, - "ims": { - "type": "array", - "items": { - "type": "object", - "properties": { - "primary": { - "type": "any" - }, - "type": { - "type": "any" - }, - "value": { - "type": "any" - } - } - } - }, - "interests": { - "type": "array", - "items": { - "type": "any" - } - }, - "jobInterests": { - "type": "array", - "items": { - "type": "any" - } - }, - "kind": { - "type": "string", - "default": "buzz#person" - }, - "languages": { - "type": "array", - "items": { - "type": "any" - } - }, - "languagesSpoken": { - "type": "array", - "items": { - "type": "any" - } - }, - "livingArrangement": { - "type": "any" - }, - "lookingFor": { - "type": "any" - }, - "movies": { - "type": "array", - "items": { - "type": "any" - } - }, - "music": { - "type": "array", - "items": { - "type": "any" - } - }, - "name": { - "type": "object", - "properties": { - "familyName": { - "type": "any" - }, - "formatted": { - "type": "any" - }, - "givenName": { - "type": "any" - }, - "honorificPrefix": { - "type": "any" - }, - "honorificSuffix": { - "type": "any" - }, - "middleName": { - "type": "any" - } - } - }, - "nickname": { - "type": "any" - }, - "note": { - "type": "any" - }, - "organizations": { - "type": "array", - "items": { - "type": "object", - "properties": { - "department": { - "type": "any" - }, - "description": { - "type": "any" - }, - "endDate": { - "type": "any" - }, - "location": { - "type": "any" - }, - "name": { - "type": "any" - }, - "primary": { - "type": "any" - }, - "startDate": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - } - } - } - }, - "pets": { - "type": "array", - "items": { - "type": "any" - } - }, - "phoneNumbers": { - "type": "array", - "items": { - "type": "object", - "properties": { - "primary": { - "type": "any" - }, - "type": { - "type": "any" - }, - "value": { - "type": "any" - } - } - } - }, - "photos": { - "type": "array", - "items": { - "type": "object", - "properties": { - "height": { - "type": "any" - }, - "primary": { - "type": "any" - }, - "type": { - "type": "any" - }, - "value": { - "type": "any" - }, - "width": { - "type": "any" - } - } - } - }, - "politicalViews": { - "type": "array", - "items": { - "type": "any" - } - }, - "preferredUsername": { - "type": "any" - }, - "profileSong": { - "type": "any" - }, - "profileUrl": { - "type": "any" - }, - "profileVideo": { - "type": "any" - }, - "published": { - "type": "string" - }, - "quotes": { - "type": "array", - "items": { - "type": "any" - } - }, - "relationshipStatus": { - "type": "any" - }, - "relationships": { - "type": "array", - "items": { - "type": "any" - } - }, - "religion": { - "type": "any" - }, - "romance": { - "type": "any" - }, - "scaredOf": { - "type": "any" - }, - "sexualOrientation": { - "type": "any" - }, - "smoker": { - "type": "any" - }, - "sports": { - "type": "array", - "items": { - "type": "any" - } - }, - "status": { - 
"type": "any" - }, - "tags": { - "type": "array", - "items": { - "type": "any" - } - }, - "thumbnailUrl": { - "type": "any" - }, - "turnOffs": { - "type": "array", - "items": { - "type": "any" - } - }, - "turnOns": { - "type": "array", - "items": { - "type": "any" - } - }, - "tvShows": { - "type": "array", - "items": { - "type": "any" - } - }, - "updated": { - "type": "string" - }, - "urls": { - "type": "array", - "items": { - "type": "object", - "properties": { - "primary": { - "type": "any" - }, - "type": { - "type": "any" - }, - "value": { - "type": "any" - } - } - } - }, - "utcOffset": { - "type": "any" - } - } - }, - "PhotosFeed": { - "id": "PhotosFeed", - "type": "object", - "properties": { - "items": { - "type": "array", - "items": { - "$ref": "ChiliPhotosResourceJson" - } - }, - "kind": { - "type": "string", - "default": "buzz#photosFeed" - } - } - }, - "Related": { - "id": "Related", - "type": "object", - "properties": { - "href": { - "type": "any" - }, - "id": { - "type": "any" - }, - "kind": { - "type": "string", - "default": "buzz#related" - }, - "summary": { - "type": "any" - }, - "title": { - "type": "any" - } - } - }, - "RelatedFeed": { - "id": "RelatedFeed", - "type": "object", - "properties": { - "id": { - "type": "any" - }, - "items": { - "type": "array", - "items": { - "$ref": "Related" - } - }, - "kind": { - "type": "string", - "default": "buzz#relatedFeed" - }, - "links": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "object", - "properties": { - "count": { - "type": "any" - }, - "height": { - "type": "any" - }, - "href": { - "type": "any" - }, - "title": { - "type": "any" - }, - "type": { - "type": "any" - }, - "updated": { - "type": "string" - }, - "width": { - "type": "any" - } - } - } - } - }, - "title": { - "type": "any" - }, - "updated": { - "type": "string" - } - } - }, - "Video": { - "id": "Video", - "type": "object", - "properties": { - "duration": { - "type": "integer" - }, - "size": { - "type": "integer" - }, - "status": { - "type": "string" - }, - "streams": { - "type": "array", - "items": { - "$ref": "Link" - } - } - } - } - }, - "resources": { - "activities": { - "methods": { - "count": { - "id": "chili.activities.count", - "path": "activities/count", - "httpMethod": "GET", - "description": "Get a count of link shares", - "parameters": { - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "url": { - "type": "string", - "description": "URLs for which to get share counts.", - "repeated": true, - "location": "query" - } - }, - "response": { - "$ref": "CountFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "delete": { - "id": "chili.activities.delete", - "path": "activities/{userId}/{scope}/{postId}", - "httpMethod": "DELETE", - "description": "Delete an activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity to delete.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "description": "The collection to which the activity 
belongs.", - "required": true, - "enum": [ - "@liked", - "@muted", - "@self" - ], - "enumDescriptions": [ - "Activities liked by the user.", - "Activities muted by the user.", - "Activities posted by the user." - ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user whose post to delete.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId" - ], - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "extractPeopleFromSearch": { - "id": "chili.activities.extractPeopleFromSearch", - "path": "activities/search/@people", - "httpMethod": "GET", - "description": "Search for people by topic", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "bbox": { - "type": "string", - "description": "Bounding box to use in a geographic location query.", - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "lat": { - "type": "string", - "description": "Latitude to use in a geographic location query.", - "location": "query" - }, - "lon": { - "type": "string", - "description": "Longitude to use in a geographic location query.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "pid": { - "type": "string", - "description": "ID of a place to use in a geographic location query.", - "location": "query" - }, - "q": { - "type": "string", - "description": "Full-text search query string.", - "location": "query" - }, - "radius": { - "type": "string", - "description": "Radius to use in a geographic location query.", - "location": "query" - } - }, - "response": { - "$ref": "PeopleFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "get": { - "id": "chili.activities.get", - "path": "activities/{userId}/@self/{postId}", - "httpMethod": "GET", - "description": "Get an activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-comments": { - "type": "integer", - "description": "Maximum number of comments to include.", - "default": "0", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "max-liked": { - "type": "integer", - "description": "Maximum number of likes to include.", - "default": "0", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the post to get.", - "required": true, - "location": "path" - }, - "truncateAtom": { - "type": "boolean", - "description": "Truncate the value of the atom:content element.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user 
whose post to get.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "postId" - ], - "response": { - "$ref": "Activity" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "insert": { - "id": "chili.activities.insert", - "path": "activities/{userId}/@self", - "httpMethod": "POST", - "description": "Create a new activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "preview": { - "type": "boolean", - "description": "If true, only preview the action.", - "default": "false", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId" - ], - "request": { - "$ref": "Activity" - }, - "response": { - "$ref": "Activity" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "list": { - "id": "chili.activities.list", - "path": "activities/{userId}/{scope}", - "httpMethod": "GET", - "description": "List activities", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-comments": { - "type": "integer", - "description": "Maximum number of comments to include.", - "default": "0", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "max-liked": { - "type": "integer", - "description": "Maximum number of likes to include.", - "default": "0", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "scope": { - "type": "string", - "description": "The collection of activities to list.", - "required": true, - "enum": [ - "@comments", - "@consumption", - "@liked", - "@public", - "@self" - ], - "enumDescriptions": [ - "Limit to activities commented on by the user.", - "Limit to activities to be consumed by the user.", - "Limit to activities liked by the user.", - "Limit to public activities posted by the user.", - "Limit to activities posted by the user." 
- ], - "location": "path" - }, - "truncateAtom": { - "type": "boolean", - "description": "Truncate the value of the atom:content element.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope" - ], - "response": { - "$ref": "ActivityFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "search": { - "id": "chili.activities.search", - "path": "activities/search", - "httpMethod": "GET", - "description": "Search for activities", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "bbox": { - "type": "string", - "description": "Bounding box to use in a geographic location query.", - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "lat": { - "type": "string", - "description": "Latitude to use in a geographic location query.", - "location": "query" - }, - "lon": { - "type": "string", - "description": "Longitude to use in a geographic location query.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "pid": { - "type": "string", - "description": "ID of a place to use in a geographic location query.", - "location": "query" - }, - "q": { - "type": "string", - "description": "Full-text search query string.", - "location": "query" - }, - "radius": { - "type": "string", - "description": "Radius to use in a geographic location query.", - "location": "query" - }, - "truncateAtom": { - "type": "boolean", - "description": "Truncate the value of the atom:content element.", - "location": "query" - } - }, - "response": { - "$ref": "ActivityFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "track": { - "id": "chili.activities.track", - "path": "activities/track", - "httpMethod": "GET", - "description": "Get real-time activity tracking information", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "bbox": { - "type": "string", - "description": "Bounding box to use in a geographic location query.", - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "lat": { - "type": "string", - "description": "Latitude to use in a geographic location query.", - "location": "query" - }, - "lon": { - "type": "string", - "description": "Longitude to use in a geographic location query.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results 
to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "pid": { - "type": "string", - "description": "ID of a place to use in a geographic location query.", - "location": "query" - }, - "q": { - "type": "string", - "description": "Full-text search query string.", - "location": "query" - }, - "radius": { - "type": "string", - "description": "Radius to use in a geographic location query.", - "location": "query" - } - }, - "response": { - "$ref": "ActivityFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "update": { - "id": "chili.activities.update", - "path": "activities/{userId}/{scope}/{postId}", - "httpMethod": "PUT", - "description": "Update an activity", - "parameters": { - "abuseType": { - "type": "string", - "location": "query" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity to update.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "description": "The collection to which the activity belongs.", - "required": true, - "enum": [ - "@abuse", - "@liked", - "@muted", - "@self" - ], - "enumDescriptions": [ - "Activities reported by the user.", - "Activities liked by the user.", - "Activities muted by the user.", - "Activities posted by the user." - ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user whose post to update.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId" - ], - "request": { - "$ref": "Activity" - }, - "response": { - "$ref": "Activity" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - } - } - }, - "comments": { - "methods": { - "delete": { - "id": "chili.comments.delete", - "path": "activities/{userId}/@self/{postId}/@comments/{commentId}", - "httpMethod": "DELETE", - "description": "Delete a comment", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "commentId": { - "type": "string", - "description": "ID of the comment being referenced.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity for which to delete the comment.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "postId", - "commentId" - ], - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "get": { - "id": "chili.comments.get", - "path": "activities/{userId}/@self/{postId}/@comments/{commentId}", - "httpMethod": "GET", - "description": "Get a comment", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation 
type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "commentId": { - "type": "string", - "description": "ID of the comment being referenced.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity for which to get comments.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "postId", - "commentId" - ], - "response": { - "$ref": "Comment" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "insert": { - "id": "chili.comments.insert", - "path": "activities/{userId}/@self/{postId}/@comments", - "httpMethod": "POST", - "description": "Create a comment", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity on which to comment.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user on whose behalf to comment.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "postId" - ], - "request": { - "$ref": "Comment" - }, - "response": { - "$ref": "Comment" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "list": { - "id": "chili.comments.list", - "path": "activities/{userId}/{scope}/{postId}/@comments", - "httpMethod": "GET", - "description": "List comments", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity for which to get comments.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "description": "The collection to which the activity belongs.", - "required": true, - "enum": [ - "@self" - ], - "enumDescriptions": [ - "Activities posted by the user." 
- ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user for whose post to get comments.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId" - ], - "response": { - "$ref": "CommentFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "update": { - "id": "chili.comments.update", - "path": "activities/{userId}/{scope}/{postId}/@comments/{commentId}", - "httpMethod": "PUT", - "description": "Update a comment", - "parameters": { - "abuseType": { - "type": "string", - "location": "query" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "commentId": { - "type": "string", - "description": "ID of the comment being referenced.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity for which to update the comment.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "description": "The collection to which the activity belongs.", - "required": true, - "enum": [ - "@abuse", - "@self" - ], - "enumDescriptions": [ - "Comments reported by the user.", - "Comments posted by the user." - ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId", - "commentId" - ], - "request": { - "$ref": "Comment" - }, - "response": { - "$ref": "Comment" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - } - } - }, - "groups": { - "methods": { - "delete": { - "id": "chili.groups.delete", - "path": "people/{userId}/@groups/{groupId}", - "httpMethod": "DELETE", - "description": "Delete a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group to delete.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId" - ], - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "get": { - "id": "chili.groups.get", - "path": "people/{userId}/@groups/{groupId}/@self", - "httpMethod": "GET", - "description": "Get a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group to get.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language 
results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId" - ], - "response": { - "$ref": "Group" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "insert": { - "id": "chili.groups.insert", - "path": "people/{userId}/@groups", - "httpMethod": "POST", - "description": "Create a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId" - ], - "request": { - "$ref": "Group" - }, - "response": { - "$ref": "Group" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "list": { - "id": "chili.groups.list", - "path": "people/{userId}/@groups", - "httpMethod": "GET", - "description": "Get a user's groups", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId" - ], - "response": { - "$ref": "GroupFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "update": { - "id": "chili.groups.update", - "path": "people/{userId}/@groups/{groupId}/@self", - "httpMethod": "PUT", - "description": "Update a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group to update.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId" - ], - "request": { - "$ref": "Group" - }, - "response": { - "$ref": "Group" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - } - } - }, - "people": { - "methods": { - "delete": { - "id": "chili.people.delete", - "path": "people/{userId}/@groups/{groupId}/{personId}", - 
"httpMethod": "DELETE", - "description": "Remove a person from a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group from which to remove the person.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "personId": { - "type": "string", - "description": "ID of the person to remove from the group.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the owner of the group.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId", - "personId" - ], - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "get": { - "id": "chili.people.get", - "path": "people/{userId}/@self", - "httpMethod": "GET", - "description": "Get a user profile", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId" - ], - "response": { - "$ref": "Person" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "liked": { - "id": "chili.people.liked", - "path": "activities/{userId}/{scope}/{postId}/{groupId}", - "httpMethod": "GET", - "description": "Get people who liked an activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "groupId": { - "type": "string", - "required": true, - "enum": [ - "@liked" - ], - "enumDescriptions": [ - "People who liked this activity." 
- ], - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity that was liked.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId", - "groupId" - ], - "response": { - "$ref": "PeopleFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "list": { - "id": "chili.people.list", - "path": "people/{userId}/@groups/{groupId}", - "httpMethod": "GET", - "description": "Get people in a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group for which to list users.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId" - ], - "response": { - "$ref": "PeopleFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "reshared": { - "id": "chili.people.reshared", - "path": "activities/{userId}/{scope}/{postId}/{groupId}", - "httpMethod": "GET", - "description": "Get people who reshared an activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "groupId": { - "type": "string", - "required": true, - "enum": [ - "@reshared" - ], - "enumDescriptions": [ - "People who reshared this activity." 
- ], - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity that was reshared.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId", - "groupId" - ], - "response": { - "$ref": "PeopleFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "search": { - "id": "chili.people.search", - "path": "people/search", - "httpMethod": "GET", - "description": "Search for people", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "q": { - "type": "string", - "description": "Full-text search query string.", - "location": "query" - } - }, - "response": { - "$ref": "PeopleFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - }, - "update": { - "id": "chili.people.update", - "path": "people/{userId}/@groups/{groupId}/{personId}", - "httpMethod": "PUT", - "description": "Add a person to a group", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "groupId": { - "type": "string", - "description": "ID of the group to which to add the person.", - "required": true, - "location": "path" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "personId": { - "type": "string", - "description": "ID of the person to add to the group.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the owner of the group.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "groupId", - "personId" - ], - "request": { - "$ref": "Person" - }, - "response": { - "$ref": "Person" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - } - } - }, - "photoAlbums": { - "methods": { - "delete": { - "id": "chili.photoAlbums.delete", - "path": "photos/{userId}/@self/{albumId}", - "httpMethod": "DELETE", - "description": "Delete a photo album", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album to delete.", - "required": true, - "location": 
"path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId" - ], - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "get": { - "id": "chili.photoAlbums.get", - "path": "photos/{userId}/@self/{albumId}", - "httpMethod": "GET", - "description": "Get a photo album", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album to get.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId" - ], - "response": { - "$ref": "Album" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "insert": { - "id": "chili.photoAlbums.insert", - "path": "photos/{userId}/@self", - "httpMethod": "POST", - "description": "Create a photo album", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId" - ], - "request": { - "$ref": "Album" - }, - "response": { - "$ref": "Album" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "list": { - "id": "chili.photoAlbums.list", - "path": "photos/{userId}/{scope}", - "httpMethod": "GET", - "description": "List a user's photo albums", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "scope": { - "type": "string", - "description": "The collection of albums to list.", - "required": true, - "enum": [ - "@self" - ], - "enumDescriptions": [ - "Albums posted by the user." 
- ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope" - ], - "response": { - "$ref": "AlbumsFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - } - } - }, - "photos": { - "methods": { - "delete": { - "id": "chili.photos.delete", - "path": "photos/{userId}/@self/{albumId}/@photos/{photoId}", - "httpMethod": "DELETE", - "description": "Delete a photo", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album to which to photo belongs.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "photoId": { - "type": "string", - "description": "ID of the photo to delete.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId", - "photoId" - ], - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "get": { - "id": "chili.photos.get", - "path": "photos/{userId}/@self/{albumId}/@photos/{photoId}", - "httpMethod": "GET", - "description": "Get photo metadata", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album containing the photo.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "photoId": { - "type": "string", - "description": "ID of the photo for which to get metadata.", - "required": true, - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId", - "photoId" - ], - "response": { - "$ref": "ChiliPhotosResourceJson" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "insert": { - "id": "chili.photos.insert", - "path": "photos/{userId}/{albumId}", - "httpMethod": "POST", - "description": "Upload a photo to an album", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album to which to upload.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId" - ], - "request": { - "$ref": "AlbumLite" - }, - "response": { 
- "$ref": "AlbumLite" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz" - ] - }, - "insert2": { - "id": "chili.photos.insert2", - "path": "photos/{userId}/@self/{albumId}/@photos", - "httpMethod": "POST", - "description": "Upload a photo to an album", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album to which to upload.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId" - ], - "request": { - "$ref": "ChiliPhotosResourceJson" - }, - "response": { - "$ref": "ChiliPhotosResourceJson" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "listByAlbum": { - "id": "chili.photos.listByAlbum", - "path": "photos/{userId}/@self/{albumId}/@photos", - "httpMethod": "GET", - "description": "List photos in an album", - "parameters": { - "albumId": { - "type": "string", - "description": "ID of the album for which to list photos.", - "required": true, - "location": "path" - }, - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "albumId" - ], - "response": { - "$ref": "PhotosFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - }, - "listByScope": { - "id": "chili.photos.listByScope", - "path": "photos/{userId}/@self/{scope}/@photos", - "httpMethod": "GET", - "description": "Get a user's photos", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "c": { - "type": "string", - "description": "A continuation token that allows pagination.", - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "max-results": { - "type": "integer", - "description": "Maximum number of results to include.", - "default": "20", - "minimum": "0", - "maximum": "4294967295", - "location": "query" - }, - "scope": { - "type": "string", - "description": "The collection of photos to list.", - "required": true, - "enum": [ - "@recent" - ], - "enumDescriptions": [ - "Recent photos uploaded by the user." 
- ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope" - ], - "response": { - "$ref": "PhotosFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/picasa" - ] - } - } - }, - "related": { - "methods": { - "list": { - "id": "chili.related.list", - "path": "activities/{userId}/{scope}/{postId}/@related", - "httpMethod": "GET", - "description": "Get related links for an activity", - "parameters": { - "alt": { - "type": "string", - "description": "Specifies an alternative representation type.", - "default": "atom", - "enum": [ - "atom", - "json" - ], - "enumDescriptions": [ - "Use Atom XML format", - "Use JSON format" - ], - "location": "query" - }, - "hl": { - "type": "string", - "description": "Language code to limit language results.", - "location": "query" - }, - "postId": { - "type": "string", - "description": "ID of the activity to which to get related links.", - "required": true, - "location": "path" - }, - "scope": { - "type": "string", - "description": "The collection to which the activity belongs.", - "required": true, - "enum": [ - "@self" - ], - "enumDescriptions": [ - "Activities posted by the user." - ], - "location": "path" - }, - "userId": { - "type": "string", - "description": "ID of the user being referenced.", - "required": true, - "location": "path" - } - }, - "parameterOrder": [ - "userId", - "scope", - "postId" - ], - "response": { - "$ref": "RelatedFeed" - }, - "scopes": [ - "https://www.googleapis.com/auth/buzz", - "https://www.googleapis.com/auth/buzz.readonly" - ] - } - } - } - } -} diff --git a/samples/localdiscovery/buzz.py b/samples/localdiscovery/buzz.py deleted file mode 100644 index 411ad27..0000000 --- a/samples/localdiscovery/buzz.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/python2.4 -# -*- coding: utf-8 -*- -# -# Copyright 2010 Google Inc. All Rights Reserved. - -"""Simple command-line example for Buzz. - -Command-line application that retrieves the users -latest content and then adds a new entry. 
-""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - -from apiclient.discovery import build_from_document - -import httplib2 -import os -import pprint - -def main(): - http = httplib2.Http() - - # Load the local copy of the discovery document - f = file(os.path.join(os.path.dirname(__file__), "buzz.json"), "r") - discovery = f.read() - f.close() - - # Construct a service from the local documents - service = build_from_document(discovery, - base="https://www.googleapis.com/", - http=http) - - pprint.pprint(service.activities().search(q='lady gaga').execute()) - - -if __name__ == '__main__': - main() diff --git a/samples/new_project_template/apiclient b/samples/new_project_template/apiclient deleted file mode 120000 index 24fe0bc..0000000 --- a/samples/new_project_template/apiclient +++ /dev/null @@ -1 +0,0 @@ -../appengine/apiclient \ No newline at end of file diff --git a/samples/new_project_template/app.yaml b/samples/new_project_template/app.yaml deleted file mode 100644 index c0d43d5..0000000 --- a/samples/new_project_template/app.yaml +++ /dev/null @@ -1,12 +0,0 @@ -application: jcg-testing-01 -version: 1 -runtime: python -api_version: 1 - -handlers: -- url: /oauth2callback - script: oauth2client/appengine.py - -- url: .* - script: main.py - diff --git a/samples/new_project_template/gflags.py b/samples/new_project_template/gflags.py deleted file mode 120000 index 5a2ff94..0000000 --- a/samples/new_project_template/gflags.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags.py \ No newline at end of file diff --git a/samples/new_project_template/gflags_validators.py b/samples/new_project_template/gflags_validators.py deleted file mode 120000 index 25d8ce8..0000000 --- a/samples/new_project_template/gflags_validators.py +++ /dev/null @@ -1 +0,0 @@ -../../gflags_validators.py \ No newline at end of file diff --git a/samples/new_project_template/httplib2 b/samples/new_project_template/httplib2 deleted file mode 120000 index 4cd2774..0000000 --- a/samples/new_project_template/httplib2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/httplib2 \ No newline at end of file diff --git a/samples/new_project_template/index.yaml b/samples/new_project_template/index.yaml deleted file mode 100644 index a3b9e05..0000000 --- a/samples/new_project_template/index.yaml +++ /dev/null @@ -1,11 +0,0 @@ -indexes: - -# AUTOGENERATED - -# This index.yaml is automatically updated whenever the dev_appserver -# detects that a new type of query is run. If you want to manage the -# index.yaml file manually, remove the above marker line (the line -# saying "# AUTOGENERATED"). If you want to manage some indexes -# manually, move them above the marker line. The index.yaml file is -# automatically uploaded to the admin console when you next deploy -# your application using appcfg.py. diff --git a/samples/new_project_template/main.py b/samples/new_project_template/main.py deleted file mode 100755 index c451e67..0000000 --- a/samples/new_project_template/main.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2007 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Starting template for Google App Engine applications. - -Use this project as a starting point if you are just beginning to build a Google -App Engine project. Remember to fill in the OAuth 2.0 client_id and -client_secret which can be obtained from the Developer Console - -""" - -__author__ = 'jcgregorio@google.com (Joe Gregorio)' - - -import httplib2 -import logging -import os -import pickle - -from apiclient.discovery import build -from oauth2client.appengine import CredentialsProperty -from oauth2client.appengine import StorageByKeyName -from oauth2client.client import OAuth2WebServerFlow -from google.appengine.api import memcache -from google.appengine.api import users -from google.appengine.ext import db -from google.appengine.ext import webapp -from google.appengine.ext.webapp import template -from google.appengine.ext.webapp import util -from google.appengine.ext.webapp.util import login_required - -# Set up a Flow object to be used if we need to authenticate. This -# sample uses OAuth 2.0, and we set up the OAuth2WebServerFlow with -# the information it needs to authenticate. Note that it is called -# the Web Server Flow, but it can also handle the flow for native -# applications -# The client_id and client_secret are copied from the Identity tab on -# the Google APIs Console - -FLOW = OAuth2WebServerFlow( - client_id='', - client_secret='', - scope='https://www.googleapis.com/auth/buzz', - user_agent='my-sample-app/1.0') - - -class Credentials(db.Model): - credentials = CredentialsProperty() - - -class MainHandler(webapp.RequestHandler): - - @login_required - def get(self): - user = users.get_current_user() - credentials = StorageByKeyName( - Credentials, user.user_id(), 'credentials').get() - - if not credentials or credentials.invalid: - return begin_oauth_flow(self, user) - - http = credentials.authorize(httplib2.Http()) - - # Build a service object for interacting with the API. Visit - # the Google APIs Console - # to get a developerKey for your own application. - service = build("buzz", "v1", http=http) - followers = service.people().list( - userId='@me', groupId='@followers').execute() - text = 'Hello, you have %s followers!' % followers['totalResults'] - - path = os.path.join(os.path.dirname(__file__), 'welcome.html') - self.response.out.write(template.render(path, {'text': text })) - - -def begin_oauth_flow(request_handler, user): - callback = request_handler.request.relative_url('/oauth2callback') - authorize_url = FLOW.step1_get_authorize_url(callback) - # Here we are using memcache to store the flow temporarily while the user - # is directed to authorize our service. You could also store the flow - # in the datastore depending on your utilization of memcache, just remember - # in that case to clean up the flow after you are done with it. - memcache.set(user.user_id(), pickle.dumps(FLOW)) - request_handler.redirect(authorize_url) - - -class OAuthHandler(webapp.RequestHandler): - - @login_required - def get(self): - user = users.get_current_user() - flow = pickle.loads(memcache.get(user.user_id())) - # This code should be ammended with application specific error - # handling. The following cases should be considered: - # 1. What if the flow doesn't exist in memcache? Or is corrupt? - # 2. What if the step2_exchange fails? 
- if flow: - credentials = flow.step2_exchange(self.request.params) - StorageByKeyName( - Credentials, user.user_id(), 'credentials').put(credentials) - self.redirect("/") - else: - # Add application specific error handling here. - pass - - -def main(): - application = webapp.WSGIApplication( - [ - ('/', MainHandler), - ('/oauth2callback', OAuthHandler) - ], - debug=True) - util.run_wsgi_app(application) - - -if __name__ == '__main__': - main() diff --git a/samples/new_project_template/oauth2 b/samples/new_project_template/oauth2 deleted file mode 120000 index ee61c25..0000000 --- a/samples/new_project_template/oauth2 +++ /dev/null @@ -1 +0,0 @@ -../appengine/oauth2 \ No newline at end of file diff --git a/samples/new_project_template/oauth2client b/samples/new_project_template/oauth2client deleted file mode 120000 index 9013119..0000000 --- a/samples/new_project_template/oauth2client +++ /dev/null @@ -1 +0,0 @@ -../../oauth2client/ \ No newline at end of file diff --git a/samples/new_project_template/uritemplate b/samples/new_project_template/uritemplate deleted file mode 120000 index 1c98e41..0000000 --- a/samples/new_project_template/uritemplate +++ /dev/null @@ -1 +0,0 @@ -../appengine/uritemplate \ No newline at end of file diff --git a/samples/new_project_template/welcome.html b/samples/new_project_template/welcome.html deleted file mode 100644 index 0117f50..0000000 --- a/samples/new_project_template/welcome.html +++ /dev/null @@ -1,8 +0,0 @@ - - - Bootcamp Translations - - -

{{ text }}

- - diff --git a/samples/oauth2/dailymotion/index.yaml b/samples/oauth2/dailymotion/index.yaml deleted file mode 100644 index a3b9e05..0000000 --- a/samples/oauth2/dailymotion/index.yaml +++ /dev/null @@ -1,11 +0,0 @@ -indexes: - -# AUTOGENERATED - -# This index.yaml is automatically updated whenever the dev_appserver -# detects that a new type of query is run. If you want to manage the -# index.yaml file manually, remove the above marker line (the line -# saying "# AUTOGENERATED"). If you want to manage some indexes -# manually, move them above the marker line. The index.yaml file is -# automatically uploaded to the admin console when you next deploy -# your application using appcfg.py. diff --git a/samples/oauth2/django_sample/__init__.py b/samples/oauth2/django_sample/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/samples/oauth2/django_sample/buzz/__init__.py b/samples/oauth2/django_sample/buzz/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/samples/oauth2/django_sample/buzz/models.py b/samples/oauth2/django_sample/buzz/models.py deleted file mode 100644 index 69c180c..0000000 --- a/samples/oauth2/django_sample/buzz/models.py +++ /dev/null @@ -1,33 +0,0 @@ -import pickle -import base64 - -from django.contrib import admin -from django.contrib.auth.models import User -from django.db import models - -from oauth2client.django_orm import FlowField -from oauth2client.django_orm import CredentialsField - -# The Flow could also be stored in memcache since it is short lived. - - -class FlowModel(models.Model): - id = models.ForeignKey(User, primary_key=True) - flow = FlowField() - - -class CredentialsModel(models.Model): - id = models.ForeignKey(User, primary_key=True) - credential = CredentialsField() - - -class CredentialsAdmin(admin.ModelAdmin): - pass - - -class FlowAdmin(admin.ModelAdmin): - pass - - -admin.site.register(CredentialsModel, CredentialsAdmin) -admin.site.register(FlowModel, FlowAdmin) diff --git a/samples/oauth2/django_sample/buzz/tests.py b/samples/oauth2/django_sample/buzz/tests.py deleted file mode 100644 index 927cadf..0000000 --- a/samples/oauth2/django_sample/buzz/tests.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -This file demonstrates two different styles of tests (one doctest and one -unittest). These will both pass when you run "manage.py test". - -Replace these with more appropriate tests for your application. -""" - -from django.test import TestCase - - -class SimpleTest(TestCase): - - def test_basic_addition(self): - """ - Tests that 1 + 1 always equals 2. - """ - self.failUnlessEqual(1 + 1, 2) - -__test__ = {"doctest": """ -Another way to test that 1 + 1 is equal to 2. 
- ->>> 1 + 1 == 2 -True -"""} diff --git a/samples/oauth2/django_sample/buzz/views.py b/samples/oauth2/django_sample/buzz/views.py deleted file mode 100644 index 9a051ea..0000000 --- a/samples/oauth2/django_sample/buzz/views.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -import logging -import httplib2 - -from django.http import HttpResponse -from django.core.urlresolvers import reverse -from django.contrib.auth.decorators import login_required - -from oauth2client.django_orm import Storage -from oauth2client.client import OAuth2WebServerFlow -from django_sample.buzz.models import CredentialsModel -from django_sample.buzz.models import FlowModel -from apiclient.discovery import build - -from django.http import HttpResponseRedirect -from django.shortcuts import render_to_response - -STEP2_URI = 'http://localhost:8000/auth_return' -FLOW = OAuth2WebServerFlow( - client_id='837647042410.apps.googleusercontent.com', - client_secret='+SWwMCL9d8gWtzPRa1lXw5R8', - scope='https://www.googleapis.com/auth/buzz', - user_agent='buzz-django-sample/1.0', - ) - -@login_required -def index(request): - storage = Storage(CredentialsModel, 'id', request.user, 'credential') - credential = storage.get() - if credential is None or credential.invalid == True: - - authorize_url = FLOW.step1_get_authorize_url(STEP2_URI) - f = FlowModel(id=request.user, flow=FLOW) - f.save() - return HttpResponseRedirect(authorize_url) - else: - http = httplib2.Http() - http = credential.authorize(http) - service = build("buzz", "v1", http=http) - activities = service.activities() - activitylist = activities.list(scope='@consumption', - userId='@me').execute() - logging.info(activitylist) - - return render_to_response('buzz/welcome.html', { - 'activitylist': activitylist, - }) - - -@login_required -def auth_return(request): - try: - f = FlowModel.objects.get(id=request.user) - credential = f.flow.step2_exchange(request.REQUEST) - storage = Storage(CredentialsModel, 'id', request.user, 'credential') - storage.put(credential) - f.delete() - return HttpResponseRedirect("/") - except FlowModel.DoesNotExist: - pass diff --git a/samples/oauth2/django_sample/manage.py b/samples/oauth2/django_sample/manage.py deleted file mode 100755 index 0b932da..0000000 --- a/samples/oauth2/django_sample/manage.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python -from django.core.management import execute_manager -try: - import settings # Assumed to be in the same directory. -except ImportError: - import sys - sys.stderr.write("""Error: Can't find the file 'settings.py' in the -directory containing %r. It appears you've customized things. You'll -have to run django-admin.py, passing it your settings module. -(If the file settings.py does indeed exist, it's causing an ImportError -somehow.)\n""" % __file__) - sys.exit(1) - -if __name__ == "__main__": - execute_manager(settings) diff --git a/samples/oauth2/django_sample/settings.py b/samples/oauth2/django_sample/settings.py deleted file mode 100644 index 565d2e5..0000000 --- a/samples/oauth2/django_sample/settings.py +++ /dev/null @@ -1,83 +0,0 @@ -# Django settings for django_sample project. -import os - -DEBUG = True -TEMPLATE_DEBUG = DEBUG - -ADMINS = ( - # ('Your Name', 'your_email@domain.com'), -) - -MANAGERS = ADMINS - -DATABASE_ENGINE = 'sqlite3' -DATABASE_NAME = 'database.sqlite3' -DATABASE_USER = '' -DATABASE_PASSWORD = '' -DATABASE_HOST = '' -DATABASE_PORT = '' - -# Local time zone for this installation. 
Choices can be found here: -# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name -# although not all choices may be available on all operating systems. -# If running in a Windows environment this must be set to the same as your -# system time zone. -TIME_ZONE = 'America/New_York' - -# Language code for this installation. All choices can be found here: -# http://www.i18nguy.com/unicode/language-identifiers.html -LANGUAGE_CODE = 'en-us' - -SITE_ID = 1 - -# If you set this to False, Django will make some optimizations so as not -# to load the internationalization machinery. -USE_I18N = True - -# Absolute path to the directory that holds media. -# Example: "/home/media/media.lawrence.com/" -MEDIA_ROOT = '' - -# URL that handles the media served from MEDIA_ROOT. Make sure to use a -# trailing slash if there is a path component (optional in other cases). -# Examples: "http://media.lawrence.com", "http://example.com/media/" -MEDIA_URL = '' - -# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a -# trailing slash. -# Examples: "http://foo.com/media/", "/media/". -ADMIN_MEDIA_PREFIX = '/media/' - -# Make this unique, and don't share it with anybody. -SECRET_KEY = '_=9hq-$t_uv1ckf&s!y2$9g$1dm*6p1cl%*!^mg=7gr)!zj32d' - -# List of callables that know how to import templates from various sources. -TEMPLATE_LOADERS = ( - 'django.template.loaders.filesystem.load_template_source', - 'django.template.loaders.app_directories.load_template_source', -# 'django.template.loaders.eggs.load_template_source', -) - -MIDDLEWARE_CLASSES = ( - 'django.middleware.common.CommonMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', -) - -ROOT_URLCONF = 'django_sample.urls' - -TEMPLATE_DIRS = ( - # Put strings here, like "/home/html/django_templates" - # Always use forward slashes, even on Windows. - # Don't forget to use absolute paths, not relative paths. - os.path.join(os.path.dirname(__file__), 'templates') -) - -INSTALLED_APPS = ( - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.sites', - 'django_sample.buzz' -) diff --git a/samples/oauth2/django_sample/static/go.png b/samples/oauth2/django_sample/static/go.png deleted file mode 100644 index e5aacda8c654cc9c3349639d6975741c99a368c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1139 zcmV-(1dRKMP)Px#24YJ`L;(K){{a7>y{D4^000SaNLh0L01FcU01FcV0GgZ_00007bV*G`2ige< z4j2fDH-&Qm000?uMObu0Z*6U5Zgc=ca%Ew3Wn>_CX>@2HM@dakSAh-}000BdNkl#C^jk*3sDS21QXOkMa4jn&22wE?$zCEyk<6g>N&gTedf%WGrNo?l}e?M z#l|4Zjg>OU@0#87Z(^C8$(r9724a|!Sg)c-rkf0AU{7}(l}8Rv9q%S zj^m)$>oG7efW5sv{M7%Mv;nxgyMxc?gUx0`PEL*_pO=>xEH5vkrKJT%qY)Dm6NpBm zbOO+7wQx8b2m}JSy}iZa;v%e8D>NF7B>$_cD@;yKqPn^oR;v|DOG|iuex|bq6c-m` zXlMwbPzcx8*YJA1Fq_Sid{e0u_V@QOI5-HsUJs7ru)VzvQ55Mcf)y1N@OV53hr>8K zJHz<+I1C1ZB;Q0L0iNg4(b0j*%1Vrmj^h0MoZeR0WHMoDY6{V46bA2#8O zW3d>#UN1~06B-&CFh4&}_gSE^u@Q4~bBM)a2nK`TI8O3Ta&&Zr;o)Ir9EMnyMORlB zR#sO2Y9~?DY4E>0NhA_Mb93|ePe3LCNG6jA27?d;0h^nfNF)-<2Ox?fc6WCn2m<_m zKc1eR{_PU{DdES*#}EVoYinzW$K$em>+9=52%!(a<>e&=L4Y6#h(sc?d`nA9!Eqc2 zAuyRt^khIh9>?nHDtMm9$;pW<-@Lp$^z`(=<#M5|tqlyr(6Iy_A0M&4z7C(yhfpXa zyNzvXY62kyeSLi>EG(p>1|$**1OfpFf`F~9E!m&L3_S`I|w1NPL3G~K&p6#VbI#z3PK2adwZePYJav9{Z}ulsi}d><$}}cM0t66 z#x!5w3ZST{2o8q>gb-LP7G$EO9RSO+=Your username and password didn't match. Please try again.

-{% endif %} - -
- - - - - - - - - -
{{ form.username.label_tag }}{{ form.username }}
{{ form.password.label_tag }}{{ form.password }}
- - - -
- -{% endblock %} diff --git a/samples/oauth2/django_sample/templates/buzz/welcome.html b/samples/oauth2/django_sample/templates/buzz/welcome.html deleted file mode 100644 index 07e8027..0000000 --- a/samples/oauth2/django_sample/templates/buzz/welcome.html +++ /dev/null @@ -1,33 +0,0 @@ - - - - Buzz Stuff - - - - - {% for item in activitylist.items %} - - - - - - {% endfor %} -
- {% if item.actor.thumbnailUrl %} - - - - {% endif %} -
- {{ item.actor.name }}
- {{ item.object.content|safe }} - - -
- - diff --git a/samples/oauth2/django_sample/urls.py b/samples/oauth2/django_sample/urls.py deleted file mode 100644 index aeba620..0000000 --- a/samples/oauth2/django_sample/urls.py +++ /dev/null @@ -1,25 +0,0 @@ -import os -from django.conf.urls.defaults import * - -# Uncomment the next two lines to enable the admin: -from django.contrib import admin -admin.autodiscover() - -urlpatterns = patterns('', - # Example: - (r'^$', 'django_sample.buzz.views.index'), - (r'^auth_return', 'django_sample.buzz.views.auth_return'), - - # Uncomment the admin/doc line below and add 'django.contrib.admindocs' - # to INSTALLED_APPS to enable admin documentation: - # (r'^admin/doc/', include('django.contrib.admindocs.urls')), - - # Uncomment the next line to enable the admin: - (r'^admin/', include(admin.site.urls)), - (r'^accounts/login/$', 'django.contrib.auth.views.login', - {'template_name': 'buzz/login.html'}), - - (r'^static/(?P.*)$', 'django.views.static.serve', - {'document_root': os.path.join(os.path.dirname(__file__), 'static') -}), -) diff --git a/simplejson/.__speedups.c b/simplejson/.__speedups.c deleted file mode 100644 index f38de2c27b9fbc529552642c506f543d48469557..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 188 zcmZQz6=P>$Vqox1Ojhs@R)|o50+1L3ClDI}aTgGS_$Vqox1Ojhs@R)|o50+1L3ClDI}aVHRi_`89a2;dkJ62#EeC<#;w(lG;w zmC>{@BE&_L^K$Vqox1Ojhs@R)|o50+1L3ClDI}aR(5C_`86Z2;dkJ62#EeC=OH#(lG;w zmC&>?BE&_L^K$Vqox1Ojhs@R)|o50+1L3ClDI}aT^eW_&b1@2;dkJ62xGpe;B9~q+c;w_hgP#PbkF3F1pxaO9o_%{ diff --git a/simplejson/._scanner.py b/simplejson/._scanner.py deleted file mode 100644 index a5b51b552ebe417b4627db65e98c3667ef40362b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 186 zcmZQz6=P>$Vqox1Ojhs@R)|o50+1L3ClDI}aR(5C_`86Z2;dkJ62#EeC<;^x(lG;w zmC&>?BE&_L^K$Vqox1Ojhs@R)|o50+1L3ClDI}aVHRi_`89a2;dkJ62xGpe;lY3q+CMFl<=O&h<7V9M^7NwRjRC9qGlANDYnwzI!t6*+s4Hn4B%u5Aw Jjg76jY60J!8U_FW diff --git a/simplejson/__init__.py b/simplejson/__init__.py deleted file mode 100644 index dcfd541..0000000 --- a/simplejson/__init__.py +++ /dev/null @@ -1,437 +0,0 @@ -r"""JSON (JavaScript Object Notation) is a subset of -JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data -interchange format. - -:mod:`simplejson` exposes an API familiar to users of the standard library -:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained -version of the :mod:`json` library contained in Python 2.6, but maintains -compatibility with Python 2.4 and Python 2.5 and (currently) has -significant performance advantages, even without using the optional C -extension for speedups. 
- -Encoding basic Python object hierarchies:: - - >>> import simplejson as json - >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) - '["foo", {"bar": ["baz", null, 1.0, 2]}]' - >>> print json.dumps("\"foo\bar") - "\"foo\bar" - >>> print json.dumps(u'\u1234') - "\u1234" - >>> print json.dumps('\\') - "\\" - >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) - {"a": 0, "b": 0, "c": 0} - >>> from StringIO import StringIO - >>> io = StringIO() - >>> json.dump(['streaming API'], io) - >>> io.getvalue() - '["streaming API"]' - -Compact encoding:: - - >>> import simplejson as json - >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) - '[1,2,3,{"4":5,"6":7}]' - -Pretty printing:: - - >>> import simplejson as json - >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ') - >>> print '\n'.join([l.rstrip() for l in s.splitlines()]) - { - "4": 5, - "6": 7 - } - -Decoding JSON:: - - >>> import simplejson as json - >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] - >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj - True - >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' - True - >>> from StringIO import StringIO - >>> io = StringIO('["streaming API"]') - >>> json.load(io)[0] == 'streaming API' - True - -Specializing JSON object decoding:: - - >>> import simplejson as json - >>> def as_complex(dct): - ... if '__complex__' in dct: - ... return complex(dct['real'], dct['imag']) - ... return dct - ... - >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', - ... object_hook=as_complex) - (1+2j) - >>> from decimal import Decimal - >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') - True - -Specializing JSON object encoding:: - - >>> import simplejson as json - >>> def encode_complex(obj): - ... if isinstance(obj, complex): - ... return [obj.real, obj.imag] - ... raise TypeError(repr(o) + " is not JSON serializable") - ... 
- >>> json.dumps(2 + 1j, default=encode_complex) - '[2.0, 1.0]' - >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) - '[2.0, 1.0]' - >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) - '[2.0, 1.0]' - - -Using simplejson.tool from the shell to validate and pretty-print:: - - $ echo '{"json":"obj"}' | python -m simplejson.tool - { - "json": "obj" - } - $ echo '{ 1.2:3.4}' | python -m simplejson.tool - Expecting property name: line 1 column 2 (char 2) -""" -__version__ = '2.1.1' -__all__ = [ - 'dump', 'dumps', 'load', 'loads', - 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', - 'OrderedDict', -] - -__author__ = 'Bob Ippolito ' - -from decimal import Decimal - -from decoder import JSONDecoder, JSONDecodeError -from encoder import JSONEncoder -def _import_OrderedDict(): - import collections - try: - return collections.OrderedDict - except AttributeError: - import ordered_dict - return ordered_dict.OrderedDict -OrderedDict = _import_OrderedDict() - -def _import_c_make_encoder(): - try: - from simplejson._speedups import make_encoder - return make_encoder - except ImportError: - return None - -_default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - use_decimal=False, -) - -def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, - encoding='utf-8', default=None, use_decimal=False, **kw): - """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a - ``.write()``-supporting file-like object). - - If ``skipkeys`` is true then ``dict`` keys that are not basic types - (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) - will be skipped instead of raising a ``TypeError``. - - If ``ensure_ascii`` is false, then the some chunks written to ``fp`` - may be ``unicode`` instances, subject to normal Python ``str`` to - ``unicode`` coercion rules. Unless ``fp.write()`` explicitly - understands ``unicode`` (as in ``codecs.getwriter()``) this is likely - to cause an error. - - If ``check_circular`` is false, then the circular reference check - for container types will be skipped and a circular reference will - result in an ``OverflowError`` (or worse). - - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) - in strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - - If *indent* is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. - - If ``separators`` is an ``(item_separator, dict_separator)`` tuple - then it will be used instead of the default ``(', ', ': ')`` separators. - ``(',', ':')`` is the most compact JSON representation. - - ``encoding`` is the character encoding for str instances, default is UTF-8. - - ``default(obj)`` is a function that should return a serializable version - of obj or raise TypeError. The default simply raises TypeError. 
- - If *use_decimal* is true (default: ``False``) then decimal.Decimal - will be natively serialized to JSON with full precision. - - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the - ``.default()`` method to serialize additional types), specify it with - the ``cls`` kwarg. - - """ - # cached encoder - if (not skipkeys and ensure_ascii and - check_circular and allow_nan and - cls is None and indent is None and separators is None and - encoding == 'utf-8' and default is None and not kw): - iterable = _default_encoder.iterencode(obj) - else: - if cls is None: - cls = JSONEncoder - iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, - check_circular=check_circular, allow_nan=allow_nan, indent=indent, - separators=separators, encoding=encoding, - default=default, use_decimal=use_decimal, **kw).iterencode(obj) - # could accelerate with writelines in some versions of Python, at - # a debuggability cost - for chunk in iterable: - fp.write(chunk) - - -def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, - allow_nan=True, cls=None, indent=None, separators=None, - encoding='utf-8', default=None, use_decimal=False, **kw): - """Serialize ``obj`` to a JSON formatted ``str``. - - If ``skipkeys`` is false then ``dict`` keys that are not basic types - (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) - will be skipped instead of raising a ``TypeError``. - - If ``ensure_ascii`` is false, then the return value will be a - ``unicode`` instance subject to normal Python ``str`` to ``unicode`` - coercion rules instead of being escaped to an ASCII ``str``. - - If ``check_circular`` is false, then the circular reference check - for container types will be skipped and a circular reference will - result in an ``OverflowError`` (or worse). - - If ``allow_nan`` is false, then it will be a ``ValueError`` to - serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in - strict compliance of the JSON specification, instead of using the - JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). - - If ``indent`` is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. - - If ``separators`` is an ``(item_separator, dict_separator)`` tuple - then it will be used instead of the default ``(', ', ': ')`` separators. - ``(',', ':')`` is the most compact JSON representation. - - ``encoding`` is the character encoding for str instances, default is UTF-8. - - ``default(obj)`` is a function that should return a serializable version - of obj or raise TypeError. The default simply raises TypeError. - - If *use_decimal* is true (default: ``False``) then decimal.Decimal - will be natively serialized to JSON with full precision. - - To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the - ``.default()`` method to serialize additional types), specify it with - the ``cls`` kwarg. 
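As a minimal sketch of the keyword arguments documented above (simplejson 2.1.x as bundled here; values and outputs are illustrative)::

    >>> import simplejson as json
    >>> from decimal import Decimal
    >>> json.dumps({'b': 1, 'a': 2}, sort_keys=True, separators=(',', ':'))
    '{"a":2,"b":1}'
    >>> json.dumps(Decimal('1.1'), use_decimal=True)
    '1.1'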
- - """ - # cached encoder - if (not skipkeys and ensure_ascii and - check_circular and allow_nan and - cls is None and indent is None and separators is None and - encoding == 'utf-8' and default is None and not use_decimal - and not kw): - return _default_encoder.encode(obj) - if cls is None: - cls = JSONEncoder - return cls( - skipkeys=skipkeys, ensure_ascii=ensure_ascii, - check_circular=check_circular, allow_nan=allow_nan, indent=indent, - separators=separators, encoding=encoding, default=default, - use_decimal=use_decimal, **kw).encode(obj) - - -_default_decoder = JSONDecoder(encoding=None, object_hook=None, - object_pairs_hook=None) - - -def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, **kw): - """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing - a JSON document) to a Python object. - - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. - - Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. - - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. This can be used to provide custom - deserializations (e.g. to support JSON-RPC class hinting). - - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - If *use_decimal* is true (default: ``False``) then it implies - parse_float=decimal.Decimal for parity with ``dump``. - - To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` - kwarg. - - """ - return loads(fp.read(), - encoding=encoding, cls=cls, object_hook=object_hook, - parse_float=parse_float, parse_int=parse_int, - parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, - use_decimal=use_decimal, **kw) - - -def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, object_pairs_hook=None, - use_decimal=False, **kw): - """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON - document) to a Python object. 
- - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. - - Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. - - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. This can be used to provide custom - deserializations (e.g. to support JSON-RPC class hinting). - - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - If *use_decimal* is true (default: ``False``) then it implies - parse_float=decimal.Decimal for parity with ``dump``. - - To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` - kwarg. 
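A short sketch of the decoding hooks described above; ``OrderedDict`` is taken from ``collections`` here, which assumes Python 2.7 (older interpreters can fall back to the bundled ``simplejson.ordered_dict``)::

    >>> import simplejson as json
    >>> from decimal import Decimal
    >>> from collections import OrderedDict
    >>> json.loads('1.1', use_decimal=True)
    Decimal('1.1')
    >>> d = json.loads('{"b": 1, "a": 2}', object_pairs_hook=OrderedDict)
    >>> d.keys() == ['b', 'a']
    True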
- - """ - if (cls is None and encoding is None and object_hook is None and - parse_int is None and parse_float is None and - parse_constant is None and object_pairs_hook is None - and not use_decimal and not kw): - return _default_decoder.decode(s) - if cls is None: - cls = JSONDecoder - if object_hook is not None: - kw['object_hook'] = object_hook - if object_pairs_hook is not None: - kw['object_pairs_hook'] = object_pairs_hook - if parse_float is not None: - kw['parse_float'] = parse_float - if parse_int is not None: - kw['parse_int'] = parse_int - if parse_constant is not None: - kw['parse_constant'] = parse_constant - if use_decimal: - if parse_float is not None: - raise TypeError("use_decimal=True implies parse_float=Decimal") - kw['parse_float'] = Decimal - return cls(encoding=encoding, **kw).decode(s) - - -def _toggle_speedups(enabled): - import simplejson.decoder as dec - import simplejson.encoder as enc - import simplejson.scanner as scan - c_make_encoder = _import_c_make_encoder() - if enabled: - dec.scanstring = dec.c_scanstring or dec.py_scanstring - enc.c_make_encoder = c_make_encoder - enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or - enc.py_encode_basestring_ascii) - scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner - else: - dec.scanstring = dec.py_scanstring - enc.c_make_encoder = None - enc.encode_basestring_ascii = enc.py_encode_basestring_ascii - scan.make_scanner = scan.py_make_scanner - dec.make_scanner = scan.make_scanner - global _default_decoder - _default_decoder = JSONDecoder( - encoding=None, - object_hook=None, - object_pairs_hook=None, - ) - global _default_encoder - _default_encoder = JSONEncoder( - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - encoding='utf-8', - default=None, - ) diff --git a/simplejson/_speedups.c b/simplejson/_speedups.c deleted file mode 100644 index b06ba50..0000000 --- a/simplejson/_speedups.c +++ /dev/null @@ -1,2561 +0,0 @@ -#include "Python.h" -#include "structmember.h" -#if PY_VERSION_HEX < 0x02070000 && !defined(PyOS_string_to_double) -#define PyOS_string_to_double json_PyOS_string_to_double -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception); -static double -json_PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) { - double x; - assert(endptr == NULL); - assert(overflow_exception == NULL); - PyFPE_START_PROTECT("json_PyOS_string_to_double", return -1.0;) - x = PyOS_ascii_atof(s); - PyFPE_END_PROTECT(x) - return x; -} -#endif -#if PY_VERSION_HEX < 0x02060000 && !defined(Py_TYPE) -#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type) -#endif -#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN) -typedef int Py_ssize_t; -#define PY_SSIZE_T_MAX INT_MAX -#define PY_SSIZE_T_MIN INT_MIN -#define PyInt_FromSsize_t PyInt_FromLong -#define PyInt_AsSsize_t PyInt_AsLong -#endif -#ifndef Py_IS_FINITE -#define Py_IS_FINITE(X) (!Py_IS_INFINITY(X) && !Py_IS_NAN(X)) -#endif - -#ifdef __GNUC__ -#define UNUSED __attribute__((__unused__)) -#else -#define UNUSED -#endif - -#define DEFAULT_ENCODING "utf-8" - -#define PyScanner_Check(op) PyObject_TypeCheck(op, &PyScannerType) -#define PyScanner_CheckExact(op) (Py_TYPE(op) == &PyScannerType) -#define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType) -#define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType) -#define Decimal_Check(op) (PyObject_TypeCheck(op, DecimalTypePtr)) - -static PyTypeObject 
PyScannerType; -static PyTypeObject PyEncoderType; -static PyTypeObject *DecimalTypePtr; - -typedef struct _PyScannerObject { - PyObject_HEAD - PyObject *encoding; - PyObject *strict; - PyObject *object_hook; - PyObject *pairs_hook; - PyObject *parse_float; - PyObject *parse_int; - PyObject *parse_constant; - PyObject *memo; -} PyScannerObject; - -static PyMemberDef scanner_members[] = { - {"encoding", T_OBJECT, offsetof(PyScannerObject, encoding), READONLY, "encoding"}, - {"strict", T_OBJECT, offsetof(PyScannerObject, strict), READONLY, "strict"}, - {"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"}, - {"object_pairs_hook", T_OBJECT, offsetof(PyScannerObject, pairs_hook), READONLY, "object_pairs_hook"}, - {"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"}, - {"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"}, - {"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"}, - {NULL} -}; - -typedef struct _PyEncoderObject { - PyObject_HEAD - PyObject *markers; - PyObject *defaultfn; - PyObject *encoder; - PyObject *indent; - PyObject *key_separator; - PyObject *item_separator; - PyObject *sort_keys; - PyObject *skipkeys; - PyObject *key_memo; - int fast_encode; - int allow_nan; - int use_decimal; -} PyEncoderObject; - -static PyMemberDef encoder_members[] = { - {"markers", T_OBJECT, offsetof(PyEncoderObject, markers), READONLY, "markers"}, - {"default", T_OBJECT, offsetof(PyEncoderObject, defaultfn), READONLY, "default"}, - {"encoder", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoder"}, - {"indent", T_OBJECT, offsetof(PyEncoderObject, indent), READONLY, "indent"}, - {"key_separator", T_OBJECT, offsetof(PyEncoderObject, key_separator), READONLY, "key_separator"}, - {"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"}, - {"sort_keys", T_OBJECT, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"}, - {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys), READONLY, "skipkeys"}, - {"key_memo", T_OBJECT, offsetof(PyEncoderObject, key_memo), READONLY, "key_memo"}, - {NULL} -}; - -static Py_ssize_t -ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars); -static PyObject * -ascii_escape_unicode(PyObject *pystr); -static PyObject * -ascii_escape_str(PyObject *pystr); -static PyObject * -py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr); -void init_speedups(void); -static PyObject * -scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr); -static PyObject * -scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr); -static PyObject * -_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx); -static PyObject * -scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds); -static int -scanner_init(PyObject *self, PyObject *args, PyObject *kwds); -static void -scanner_dealloc(PyObject *self); -static int -scanner_clear(PyObject *self); -static PyObject * -encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds); -static int -encoder_init(PyObject *self, PyObject *args, PyObject *kwds); -static void -encoder_dealloc(PyObject *self); -static int -encoder_clear(PyObject *self); -static int -encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level); -static int -encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject 
*obj, Py_ssize_t indent_level); -static int -encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level); -static PyObject * -_encoded_const(PyObject *obj); -static void -raise_errmsg(char *msg, PyObject *s, Py_ssize_t end); -static PyObject * -encoder_encode_string(PyEncoderObject *s, PyObject *obj); -static int -_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr); -static PyObject * -_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr); -static PyObject * -encoder_encode_float(PyEncoderObject *s, PyObject *obj); - -#define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"') -#define IS_WHITESPACE(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\n') || ((c) == '\r')) - -#define MIN_EXPANSION 6 -#ifdef Py_UNICODE_WIDE -#define MAX_EXPANSION (2 * MIN_EXPANSION) -#else -#define MAX_EXPANSION MIN_EXPANSION -#endif - -static int -_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr) -{ - /* PyObject to Py_ssize_t converter */ - *size_ptr = PyInt_AsSsize_t(o); - if (*size_ptr == -1 && PyErr_Occurred()) - return 0; - return 1; -} - -static PyObject * -_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr) -{ - /* Py_ssize_t to PyObject converter */ - return PyInt_FromSsize_t(*size_ptr); -} - -static Py_ssize_t -ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars) -{ - /* Escape unicode code point c to ASCII escape sequences - in char *output. output must have at least 12 bytes unused to - accommodate an escaped surrogate pair "\uXXXX\uXXXX" */ - output[chars++] = '\\'; - switch (c) { - case '\\': output[chars++] = (char)c; break; - case '"': output[chars++] = (char)c; break; - case '\b': output[chars++] = 'b'; break; - case '\f': output[chars++] = 'f'; break; - case '\n': output[chars++] = 'n'; break; - case '\r': output[chars++] = 'r'; break; - case '\t': output[chars++] = 't'; break; - default: -#ifdef Py_UNICODE_WIDE - if (c >= 0x10000) { - /* UTF-16 surrogate pair */ - Py_UNICODE v = c - 0x10000; - c = 0xd800 | ((v >> 10) & 0x3ff); - output[chars++] = 'u'; - output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf]; - output[chars++] = "0123456789abcdef"[(c ) & 0xf]; - c = 0xdc00 | (v & 0x3ff); - output[chars++] = '\\'; - } -#endif - output[chars++] = 'u'; - output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 8) & 0xf]; - output[chars++] = "0123456789abcdef"[(c >> 4) & 0xf]; - output[chars++] = "0123456789abcdef"[(c ) & 0xf]; - } - return chars; -} - -static PyObject * -ascii_escape_unicode(PyObject *pystr) -{ - /* Take a PyUnicode pystr and return a new ASCII-only escaped PyString */ - Py_ssize_t i; - Py_ssize_t input_chars; - Py_ssize_t output_size; - Py_ssize_t max_output_size; - Py_ssize_t chars; - PyObject *rval; - char *output; - Py_UNICODE *input_unicode; - - input_chars = PyUnicode_GET_SIZE(pystr); - input_unicode = PyUnicode_AS_UNICODE(pystr); - - /* One char input can be up to 6 chars output, estimate 4 of these */ - output_size = 2 + (MIN_EXPANSION * 4) + input_chars; - max_output_size = 2 + (input_chars * MAX_EXPANSION); - rval = PyString_FromStringAndSize(NULL, output_size); - if (rval == NULL) { - return NULL; - } - output = PyString_AS_STRING(rval); - chars = 0; - output[chars++] = '"'; - for (i = 0; i < input_chars; i++) { - Py_UNICODE c = input_unicode[i]; - if (S_CHAR(c)) { - output[chars++] = (char)c; - } - else { - chars = ascii_escape_char(c, output, chars); - } - 
if (output_size - chars < (1 + MAX_EXPANSION)) { - /* There's more than four, so let's resize by a lot */ - Py_ssize_t new_output_size = output_size * 2; - /* This is an upper bound */ - if (new_output_size > max_output_size) { - new_output_size = max_output_size; - } - /* Make sure that the output size changed before resizing */ - if (new_output_size != output_size) { - output_size = new_output_size; - if (_PyString_Resize(&rval, output_size) == -1) { - return NULL; - } - output = PyString_AS_STRING(rval); - } - } - } - output[chars++] = '"'; - if (_PyString_Resize(&rval, chars) == -1) { - return NULL; - } - return rval; -} - -static PyObject * -ascii_escape_str(PyObject *pystr) -{ - /* Take a PyString pystr and return a new ASCII-only escaped PyString */ - Py_ssize_t i; - Py_ssize_t input_chars; - Py_ssize_t output_size; - Py_ssize_t chars; - PyObject *rval; - char *output; - char *input_str; - - input_chars = PyString_GET_SIZE(pystr); - input_str = PyString_AS_STRING(pystr); - - /* Fast path for a string that's already ASCII */ - for (i = 0; i < input_chars; i++) { - Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i]; - if (!S_CHAR(c)) { - /* If we have to escape something, scan the string for unicode */ - Py_ssize_t j; - for (j = i; j < input_chars; j++) { - c = (Py_UNICODE)(unsigned char)input_str[j]; - if (c > 0x7f) { - /* We hit a non-ASCII character, bail to unicode mode */ - PyObject *uni; - uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict"); - if (uni == NULL) { - return NULL; - } - rval = ascii_escape_unicode(uni); - Py_DECREF(uni); - return rval; - } - } - break; - } - } - - if (i == input_chars) { - /* Input is already ASCII */ - output_size = 2 + input_chars; - } - else { - /* One char input can be up to 6 chars output, estimate 4 of these */ - output_size = 2 + (MIN_EXPANSION * 4) + input_chars; - } - rval = PyString_FromStringAndSize(NULL, output_size); - if (rval == NULL) { - return NULL; - } - output = PyString_AS_STRING(rval); - output[0] = '"'; - - /* We know that everything up to i is ASCII already */ - chars = i + 1; - memcpy(&output[1], input_str, i); - - for (; i < input_chars; i++) { - Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i]; - if (S_CHAR(c)) { - output[chars++] = (char)c; - } - else { - chars = ascii_escape_char(c, output, chars); - } - /* An ASCII char can't possibly expand to a surrogate! 
*/ - if (output_size - chars < (1 + MIN_EXPANSION)) { - /* There's more than four, so let's resize by a lot */ - output_size *= 2; - if (output_size > 2 + (input_chars * MIN_EXPANSION)) { - output_size = 2 + (input_chars * MIN_EXPANSION); - } - if (_PyString_Resize(&rval, output_size) == -1) { - return NULL; - } - output = PyString_AS_STRING(rval); - } - } - output[chars++] = '"'; - if (_PyString_Resize(&rval, chars) == -1) { - return NULL; - } - return rval; -} - -static void -raise_errmsg(char *msg, PyObject *s, Py_ssize_t end) -{ - /* Use the Python function simplejson.decoder.errmsg to raise a nice - looking ValueError exception */ - static PyObject *JSONDecodeError = NULL; - PyObject *exc; - if (JSONDecodeError == NULL) { - PyObject *decoder = PyImport_ImportModule("simplejson.decoder"); - if (decoder == NULL) - return; - JSONDecodeError = PyObject_GetAttrString(decoder, "JSONDecodeError"); - Py_DECREF(decoder); - if (JSONDecodeError == NULL) - return; - } - exc = PyObject_CallFunction(JSONDecodeError, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end); - if (exc) { - PyErr_SetObject(JSONDecodeError, exc); - Py_DECREF(exc); - } -} - -static PyObject * -join_list_unicode(PyObject *lst) -{ - /* return u''.join(lst) */ - static PyObject *joinfn = NULL; - if (joinfn == NULL) { - PyObject *ustr = PyUnicode_FromUnicode(NULL, 0); - if (ustr == NULL) - return NULL; - - joinfn = PyObject_GetAttrString(ustr, "join"); - Py_DECREF(ustr); - if (joinfn == NULL) - return NULL; - } - return PyObject_CallFunctionObjArgs(joinfn, lst, NULL); -} - -static PyObject * -join_list_string(PyObject *lst) -{ - /* return ''.join(lst) */ - static PyObject *joinfn = NULL; - if (joinfn == NULL) { - PyObject *ustr = PyString_FromStringAndSize(NULL, 0); - if (ustr == NULL) - return NULL; - - joinfn = PyObject_GetAttrString(ustr, "join"); - Py_DECREF(ustr); - if (joinfn == NULL) - return NULL; - } - return PyObject_CallFunctionObjArgs(joinfn, lst, NULL); -} - -static PyObject * -_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) { - /* return (rval, idx) tuple, stealing reference to rval */ - PyObject *tpl; - PyObject *pyidx; - /* - steal a reference to rval, returns (rval, idx) - */ - if (rval == NULL) { - return NULL; - } - pyidx = PyInt_FromSsize_t(idx); - if (pyidx == NULL) { - Py_DECREF(rval); - return NULL; - } - tpl = PyTuple_New(2); - if (tpl == NULL) { - Py_DECREF(pyidx); - Py_DECREF(rval); - return NULL; - } - PyTuple_SET_ITEM(tpl, 0, rval); - PyTuple_SET_ITEM(tpl, 1, pyidx); - return tpl; -} - -#define APPEND_OLD_CHUNK \ - if (chunk != NULL) { \ - if (chunks == NULL) { \ - chunks = PyList_New(0); \ - if (chunks == NULL) { \ - goto bail; \ - } \ - } \ - if (PyList_Append(chunks, chunk)) { \ - goto bail; \ - } \ - Py_CLEAR(chunk); \ - } - -static PyObject * -scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr) -{ - /* Read the JSON string from PyString pystr. - end is the index of the first character after the quote. 
- encoding is the encoding of pystr (must be an ASCII superset) - if strict is zero then literal control characters are allowed - *next_end_ptr is a return-by-reference index of the character - after the end quote - - Return value is a new PyString (if ASCII-only) or PyUnicode - */ - PyObject *rval; - Py_ssize_t len = PyString_GET_SIZE(pystr); - Py_ssize_t begin = end - 1; - Py_ssize_t next = begin; - int has_unicode = 0; - char *buf = PyString_AS_STRING(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - - if (end < 0 || len <= end) { - PyErr_SetString(PyExc_ValueError, "end is out of bounds"); - goto bail; - } - while (1) { - /* Find the end of the string or the next escape */ - Py_UNICODE c = 0; - for (next = end; next < len; next++) { - c = (unsigned char)buf[next]; - if (c == '"' || c == '\\') { - break; - } - else if (strict && c <= 0x1f) { - raise_errmsg("Invalid control character at", pystr, next); - goto bail; - } - else if (c > 0x7f) { - has_unicode = 1; - } - } - if (!(c == '"' || c == '\\')) { - raise_errmsg("Unterminated string starting at", pystr, begin); - goto bail; - } - /* Pick up this chunk if it's not zero length */ - if (next != end) { - PyObject *strchunk; - APPEND_OLD_CHUNK - strchunk = PyString_FromStringAndSize(&buf[end], next - end); - if (strchunk == NULL) { - goto bail; - } - if (has_unicode) { - chunk = PyUnicode_FromEncodedObject(strchunk, encoding, NULL); - Py_DECREF(strchunk); - if (chunk == NULL) { - goto bail; - } - } - else { - chunk = strchunk; - } - } - next++; - if (c == '"') { - end = next; - break; - } - if (next == len) { - raise_errmsg("Unterminated string starting at", pystr, begin); - goto bail; - } - c = buf[next]; - if (c != 'u') { - /* Non-unicode backslash escapes */ - end = next + 1; - switch (c) { - case '"': break; - case '\\': break; - case '/': break; - case 'b': c = '\b'; break; - case 'f': c = '\f'; break; - case 'n': c = '\n'; break; - case 'r': c = '\r'; break; - case 't': c = '\t'; break; - default: c = 0; - } - if (c == 0) { - raise_errmsg("Invalid \\escape", pystr, end - 2); - goto bail; - } - } - else { - c = 0; - next++; - end = next + 4; - if (end >= len) { - raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1); - goto bail; - } - /* Decode 4 hex digits */ - for (; next < end; next++) { - Py_UNICODE digit = buf[next]; - c <<= 4; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c |= (digit - 'A' + 10); break; - default: - raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5); - goto bail; - } - } -#ifdef Py_UNICODE_WIDE - /* Surrogate pair */ - if ((c & 0xfc00) == 0xd800) { - Py_UNICODE c2 = 0; - if (end + 6 >= len) { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - if (buf[next++] != '\\' || buf[next++] != 'u') { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - end += 6; - /* Decode 4 hex digits */ - for (; next < end; next++) { - c2 <<= 4; - Py_UNICODE digit = buf[next]; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c2 |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c2 |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c2 |= 
(digit - 'A' + 10); break; - default: - raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5); - goto bail; - } - } - if ((c2 & 0xfc00) != 0xdc00) { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); - } - else if ((c & 0xfc00) == 0xdc00) { - raise_errmsg("Unpaired low surrogate", pystr, end - 5); - goto bail; - } -#endif - } - if (c > 0x7f) { - has_unicode = 1; - } - APPEND_OLD_CHUNK - if (has_unicode) { - chunk = PyUnicode_FromUnicode(&c, 1); - if (chunk == NULL) { - goto bail; - } - } - else { - char c_char = Py_CHARMASK(c); - chunk = PyString_FromStringAndSize(&c_char, 1); - if (chunk == NULL) { - goto bail; - } - } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else - rval = PyString_FromStringAndSize("", 0); - } - else { - APPEND_OLD_CHUNK - rval = join_list_string(chunks); - if (rval == NULL) { - goto bail; - } - Py_CLEAR(chunks); - } - - *next_end_ptr = end; - return rval; -bail: - *next_end_ptr = -1; - Py_XDECREF(chunk); - Py_XDECREF(chunks); - return NULL; -} - - -static PyObject * -scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr) -{ - /* Read the JSON string from PyUnicode pystr. - end is the index of the first character after the quote. - if strict is zero then literal control characters are allowed - *next_end_ptr is a return-by-reference index of the character - after the end quote - - Return value is a new PyUnicode - */ - PyObject *rval; - Py_ssize_t len = PyUnicode_GET_SIZE(pystr); - Py_ssize_t begin = end - 1; - Py_ssize_t next = begin; - const Py_UNICODE *buf = PyUnicode_AS_UNICODE(pystr); - PyObject *chunks = NULL; - PyObject *chunk = NULL; - - if (end < 0 || len <= end) { - PyErr_SetString(PyExc_ValueError, "end is out of bounds"); - goto bail; - } - while (1) { - /* Find the end of the string or the next escape */ - Py_UNICODE c = 0; - for (next = end; next < len; next++) { - c = buf[next]; - if (c == '"' || c == '\\') { - break; - } - else if (strict && c <= 0x1f) { - raise_errmsg("Invalid control character at", pystr, next); - goto bail; - } - } - if (!(c == '"' || c == '\\')) { - raise_errmsg("Unterminated string starting at", pystr, begin); - goto bail; - } - /* Pick up this chunk if it's not zero length */ - if (next != end) { - APPEND_OLD_CHUNK - chunk = PyUnicode_FromUnicode(&buf[end], next - end); - if (chunk == NULL) { - goto bail; - } - } - next++; - if (c == '"') { - end = next; - break; - } - if (next == len) { - raise_errmsg("Unterminated string starting at", pystr, begin); - goto bail; - } - c = buf[next]; - if (c != 'u') { - /* Non-unicode backslash escapes */ - end = next + 1; - switch (c) { - case '"': break; - case '\\': break; - case '/': break; - case 'b': c = '\b'; break; - case 'f': c = '\f'; break; - case 'n': c = '\n'; break; - case 'r': c = '\r'; break; - case 't': c = '\t'; break; - default: c = 0; - } - if (c == 0) { - raise_errmsg("Invalid \\escape", pystr, end - 2); - goto bail; - } - } - else { - c = 0; - next++; - end = next + 4; - if (end >= len) { - raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1); - goto bail; - } - /* Decode 4 hex digits */ - for (; next < end; next++) { - Py_UNICODE digit = buf[next]; - c <<= 4; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c |= (digit - 'a' + 10); break; - case 'A': case 'B': case 
'C': case 'D': case 'E': - case 'F': - c |= (digit - 'A' + 10); break; - default: - raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5); - goto bail; - } - } -#ifdef Py_UNICODE_WIDE - /* Surrogate pair */ - if ((c & 0xfc00) == 0xd800) { - Py_UNICODE c2 = 0; - if (end + 6 >= len) { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - if (buf[next++] != '\\' || buf[next++] != 'u') { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - end += 6; - /* Decode 4 hex digits */ - for (; next < end; next++) { - c2 <<= 4; - Py_UNICODE digit = buf[next]; - switch (digit) { - case '0': case '1': case '2': case '3': case '4': - case '5': case '6': case '7': case '8': case '9': - c2 |= (digit - '0'); break; - case 'a': case 'b': case 'c': case 'd': case 'e': - case 'f': - c2 |= (digit - 'a' + 10); break; - case 'A': case 'B': case 'C': case 'D': case 'E': - case 'F': - c2 |= (digit - 'A' + 10); break; - default: - raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5); - goto bail; - } - } - if ((c2 & 0xfc00) != 0xdc00) { - raise_errmsg("Unpaired high surrogate", pystr, end - 5); - goto bail; - } - c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00)); - } - else if ((c & 0xfc00) == 0xdc00) { - raise_errmsg("Unpaired low surrogate", pystr, end - 5); - goto bail; - } -#endif - } - APPEND_OLD_CHUNK - chunk = PyUnicode_FromUnicode(&c, 1); - if (chunk == NULL) { - goto bail; - } - } - - if (chunks == NULL) { - if (chunk != NULL) - rval = chunk; - else - rval = PyUnicode_FromUnicode(NULL, 0); - } - else { - APPEND_OLD_CHUNK - rval = join_list_unicode(chunks); - if (rval == NULL) { - goto bail; - } - Py_CLEAR(chunks); - } - *next_end_ptr = end; - return rval; -bail: - *next_end_ptr = -1; - Py_XDECREF(chunk); - Py_XDECREF(chunks); - return NULL; -} - -PyDoc_STRVAR(pydoc_scanstring, - "scanstring(basestring, end, encoding, strict=True) -> (str, end)\n" - "\n" - "Scan the string s for a JSON string. End is the index of the\n" - "character in s after the quote that started the JSON string.\n" - "Unescapes all valid JSON string escape sequences and raises ValueError\n" - "on attempt to decode an invalid string. If strict is False then literal\n" - "control characters are allowed in the string.\n" - "\n" - "Returns a tuple of the decoded string and the index of the character in s\n" - "after the end quote." 
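To make the contract above concrete, an illustrative call (the exact return type, ``str`` versus ``unicode``, depends on whether the C extension is installed, so the comparison is written to be indifferent to that)::

    >>> from simplejson.decoder import scanstring
    >>> scanstring('"foo", "bar"', 1) == (u'foo', 5)
    True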
-); - -static PyObject * -py_scanstring(PyObject* self UNUSED, PyObject *args) -{ - PyObject *pystr; - PyObject *rval; - Py_ssize_t end; - Py_ssize_t next_end = -1; - char *encoding = NULL; - int strict = 1; - if (!PyArg_ParseTuple(args, "OO&|zi:scanstring", &pystr, _convertPyInt_AsSsize_t, &end, &encoding, &strict)) { - return NULL; - } - if (encoding == NULL) { - encoding = DEFAULT_ENCODING; - } - if (PyString_Check(pystr)) { - rval = scanstring_str(pystr, end, encoding, strict, &next_end); - } - else if (PyUnicode_Check(pystr)) { - rval = scanstring_unicode(pystr, end, strict, &next_end); - } - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } - return _build_rval_index_tuple(rval, next_end); -} - -PyDoc_STRVAR(pydoc_encode_basestring_ascii, - "encode_basestring_ascii(basestring) -> str\n" - "\n" - "Return an ASCII-only JSON representation of a Python string" -); - -static PyObject * -py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr) -{ - /* Return an ASCII-only JSON representation of a Python string */ - /* METH_O */ - if (PyString_Check(pystr)) { - return ascii_escape_str(pystr); - } - else if (PyUnicode_Check(pystr)) { - return ascii_escape_unicode(pystr); - } - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } -} - -static void -scanner_dealloc(PyObject *self) -{ - /* Deallocate scanner object */ - scanner_clear(self); - Py_TYPE(self)->tp_free(self); -} - -static int -scanner_traverse(PyObject *self, visitproc visit, void *arg) -{ - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - Py_VISIT(s->encoding); - Py_VISIT(s->strict); - Py_VISIT(s->object_hook); - Py_VISIT(s->pairs_hook); - Py_VISIT(s->parse_float); - Py_VISIT(s->parse_int); - Py_VISIT(s->parse_constant); - Py_VISIT(s->memo); - return 0; -} - -static int -scanner_clear(PyObject *self) -{ - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - Py_CLEAR(s->encoding); - Py_CLEAR(s->strict); - Py_CLEAR(s->object_hook); - Py_CLEAR(s->pairs_hook); - Py_CLEAR(s->parse_float); - Py_CLEAR(s->parse_int); - Py_CLEAR(s->parse_constant); - Py_CLEAR(s->memo); - return 0; -} - -static PyObject * -_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { - /* Read a JSON object from PyString pystr. - idx is the index of the first character after the opening curly brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing curly brace. 
- - Returns a new PyObject (usually a dict, but object_hook or - object_pairs_hook can change that) - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; - PyObject *key = NULL; - PyObject *val = NULL; - char *encoding = PyString_AS_STRING(s->encoding); - int strict = PyObject_IsTrue(s->strict); - int has_pairs_hook = (s->pairs_hook != Py_None); - Py_ssize_t next_idx; - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } - - /* skip whitespace after { */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the object is non-empty */ - if (idx <= end_idx && str[idx] != '}') { - while (idx <= end_idx) { - PyObject *memokey; - - /* read key */ - if (str[idx] != '"') { - raise_errmsg("Expecting property name", pystr, idx); - goto bail; - } - key = scanstring_str(pystr, idx + 1, encoding, strict, &next_idx); - if (key == NULL) - goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } - idx = next_idx; - - /* skip whitespace between key and : delimiter, read :, skip whitespace */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - if (idx > end_idx || str[idx] != ':') { - raise_errmsg("Expecting : delimiter", pystr, idx); - goto bail; - } - idx++; - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* read any JSON data type */ - val = scan_once_str(s, pystr, idx, &next_idx); - if (val == NULL) - goto bail; - - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } - idx = next_idx; - - /* skip whitespace before } or , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the object is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (str[idx] == '}') { - break; - } - else if (str[idx] != ',') { - raise_errmsg("Expecting , delimiter", pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , delimiter */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - } - } - /* verify that idx < end_idx, str[idx] should be '}' */ - if (idx > end_idx || str[idx] != '}') { - raise_errmsg("Expecting object", pystr, end_idx); - goto bail; - } - - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - - /* if object_hook is not None: rval = object_hook(rval) */ - if (s->object_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); - if (val == NULL) - goto bail; - Py_DECREF(rval); - rval = val; - val = NULL; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(rval); - Py_XDECREF(key); - Py_XDECREF(val); - Py_XDECREF(pairs); - return NULL; -} - -static PyObject * -_parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { - /* Read a JSON object from PyUnicode pystr. 
- idx is the index of the first character after the opening curly brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing curly brace. - - Returns a new PyObject (usually a dict, but object_hook can change that) - */ - Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); - Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1; - PyObject *rval = NULL; - PyObject *pairs = NULL; - PyObject *item; - PyObject *key = NULL; - PyObject *val = NULL; - int strict = PyObject_IsTrue(s->strict); - int has_pairs_hook = (s->pairs_hook != Py_None); - Py_ssize_t next_idx; - - if (has_pairs_hook) { - pairs = PyList_New(0); - if (pairs == NULL) - return NULL; - } - else { - rval = PyDict_New(); - if (rval == NULL) - return NULL; - } - - /* skip whitespace after { */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the object is non-empty */ - if (idx <= end_idx && str[idx] != '}') { - while (idx <= end_idx) { - PyObject *memokey; - - /* read key */ - if (str[idx] != '"') { - raise_errmsg("Expecting property name", pystr, idx); - goto bail; - } - key = scanstring_unicode(pystr, idx + 1, strict, &next_idx); - if (key == NULL) - goto bail; - memokey = PyDict_GetItem(s->memo, key); - if (memokey != NULL) { - Py_INCREF(memokey); - Py_DECREF(key); - key = memokey; - } - else { - if (PyDict_SetItem(s->memo, key, key) < 0) - goto bail; - } - idx = next_idx; - - /* skip whitespace between key and : delimiter, read :, skip whitespace */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - if (idx > end_idx || str[idx] != ':') { - raise_errmsg("Expecting : delimiter", pystr, idx); - goto bail; - } - idx++; - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* read any JSON term */ - val = scan_once_unicode(s, pystr, idx, &next_idx); - if (val == NULL) - goto bail; - - if (has_pairs_hook) { - item = PyTuple_Pack(2, key, val); - if (item == NULL) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - if (PyList_Append(pairs, item) == -1) { - Py_DECREF(item); - goto bail; - } - Py_DECREF(item); - } - else { - if (PyDict_SetItem(rval, key, val) < 0) - goto bail; - Py_CLEAR(key); - Py_CLEAR(val); - } - idx = next_idx; - - /* skip whitespace before } or , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the object is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (str[idx] == '}') { - break; - } - else if (str[idx] != ',') { - raise_errmsg("Expecting , delimiter", pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , delimiter */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - } - } - - /* verify that idx < end_idx, str[idx] should be '}' */ - if (idx > end_idx || str[idx] != '}') { - raise_errmsg("Expecting object", pystr, end_idx); - goto bail; - } - - /* if pairs_hook is not None: rval = object_pairs_hook(pairs) */ - if (s->pairs_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL); - if (val == NULL) - goto bail; - Py_DECREF(pairs); - *next_idx_ptr = idx + 1; - return val; - } - - /* if object_hook is not None: rval = object_hook(rval) */ - if (s->object_hook != Py_None) { - val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL); - if (val == NULL) - goto bail; - Py_DECREF(rval); - rval = val; - val = NULL; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(rval); - Py_XDECREF(key); - Py_XDECREF(val); - Py_XDECREF(pairs); - return NULL; -} - -static PyObject * -_parse_array_str(PyScannerObject *s, PyObject 
*pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { - /* Read a JSON array from PyString pystr. - idx is the index of the first character after the opening brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing brace. - - Returns a new PyList - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - PyObject *val = NULL; - PyObject *rval = PyList_New(0); - Py_ssize_t next_idx; - if (rval == NULL) - return NULL; - - /* skip whitespace after [ */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the array is non-empty */ - if (idx <= end_idx && str[idx] != ']') { - while (idx <= end_idx) { - - /* read any JSON term and de-tuplefy the (rval, idx) */ - val = scan_once_str(s, pystr, idx, &next_idx); - if (val == NULL) { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - PyErr_Clear(); - raise_errmsg("Expecting object", pystr, idx); - } - goto bail; - } - - if (PyList_Append(rval, val) == -1) - goto bail; - - Py_CLEAR(val); - idx = next_idx; - - /* skip whitespace between term and , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the array is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (str[idx] == ']') { - break; - } - else if (str[idx] != ',') { - raise_errmsg("Expecting , delimiter", pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - } - } - - /* verify that idx < end_idx, str[idx] should be ']' */ - if (idx > end_idx || str[idx] != ']') { - raise_errmsg("Expecting object", pystr, end_idx); - goto bail; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(val); - Py_DECREF(rval); - return NULL; -} - -static PyObject * -_parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { - /* Read a JSON array from PyString pystr. - idx is the index of the first character after the opening brace. - *next_idx_ptr is a return-by-reference index to the first character after - the closing brace. 
- - Returns a new PyList - */ - Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); - Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1; - PyObject *val = NULL; - PyObject *rval = PyList_New(0); - Py_ssize_t next_idx; - if (rval == NULL) - return NULL; - - /* skip whitespace after [ */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* only loop if the array is non-empty */ - if (idx <= end_idx && str[idx] != ']') { - while (idx <= end_idx) { - - /* read any JSON term */ - val = scan_once_unicode(s, pystr, idx, &next_idx); - if (val == NULL) { - if (PyErr_ExceptionMatches(PyExc_StopIteration)) { - PyErr_Clear(); - raise_errmsg("Expecting object", pystr, idx); - } - goto bail; - } - - if (PyList_Append(rval, val) == -1) - goto bail; - - Py_CLEAR(val); - idx = next_idx; - - /* skip whitespace between term and , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - - /* bail if the array is closed or we didn't get the , delimiter */ - if (idx > end_idx) break; - if (str[idx] == ']') { - break; - } - else if (str[idx] != ',') { - raise_errmsg("Expecting , delimiter", pystr, idx); - goto bail; - } - idx++; - - /* skip whitespace after , */ - while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++; - } - } - - /* verify that idx < end_idx, str[idx] should be ']' */ - if (idx > end_idx || str[idx] != ']') { - raise_errmsg("Expecting object", pystr, end_idx); - goto bail; - } - *next_idx_ptr = idx + 1; - return rval; -bail: - Py_XDECREF(val); - Py_DECREF(rval); - return NULL; -} - -static PyObject * -_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { - /* Read a JSON constant from PyString pystr. - constant is the constant string that was found - ("NaN", "Infinity", "-Infinity"). - idx is the index of the first character of the constant - *next_idx_ptr is a return-by-reference index to the first character after - the constant. - - Returns the result of parse_constant - */ - PyObject *cstr; - PyObject *rval; - /* constant is "NaN", "Infinity", or "-Infinity" */ - cstr = PyString_InternFromString(constant); - if (cstr == NULL) - return NULL; - - /* rval = parse_constant(constant) */ - rval = PyObject_CallFunctionObjArgs(s->parse_constant, cstr, NULL); - idx += PyString_GET_SIZE(cstr); - Py_DECREF(cstr); - *next_idx_ptr = idx; - return rval; -} - -static PyObject * -_match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) { - /* Read a JSON number from PyString pystr. - idx is the index of the first character of the number - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of that number: - PyInt, PyLong, or PyFloat. 
- May return other types if parse_int or parse_float are set - */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1; - Py_ssize_t idx = start; - int is_float = 0; - PyObject *rval; - PyObject *numstr; - - /* read a sign if it's there, make sure it's not the end of the string */ - if (str[idx] == '-') { - idx++; - if (idx > end_idx) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - } - - /* read as many integer digits as we find as long as it doesn't start with 0 */ - if (str[idx] >= '1' && str[idx] <= '9') { - idx++; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - /* if it starts with 0 we only expect one integer digit */ - else if (str[idx] == '0') { - idx++; - } - /* no integer digits, error */ - else { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - - /* if the next char is '.' followed by a digit then read all float digits */ - if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') { - is_float = 1; - idx += 2; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - - /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ - if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) { - - /* save the index of the 'e' or 'E' just in case we need to backtrack */ - Py_ssize_t e_start = idx; - idx++; - - /* read an exponent sign if present */ - if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++; - - /* read all digits */ - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - - /* if we got a digit, then parse as float. if not, backtrack */ - if (str[idx - 1] >= '0' && str[idx - 1] <= '9') { - is_float = 1; - } - else { - idx = e_start; - } - } - - /* copy the section we determined to be a number */ - numstr = PyString_FromStringAndSize(&str[start], idx - start); - if (numstr == NULL) - return NULL; - if (is_float) { - /* parse as a float using a fast path if available, otherwise call user defined method */ - if (s->parse_float != (PyObject *)&PyFloat_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); - } - else { - /* rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); */ - double d = PyOS_string_to_double(PyString_AS_STRING(numstr), - NULL, NULL); - if (d == -1.0 && PyErr_Occurred()) - return NULL; - rval = PyFloat_FromDouble(d); - } - } - else { - /* parse as an int using a fast path if available, otherwise call user defined method */ - if (s->parse_int != (PyObject *)&PyInt_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL); - } - else { - rval = PyInt_FromString(PyString_AS_STRING(numstr), NULL, 10); - } - } - Py_DECREF(numstr); - *next_idx_ptr = idx; - return rval; -} - -static PyObject * -_match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) { - /* Read a JSON number from PyUnicode pystr. - idx is the index of the first character of the number - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of that number: - PyInt, PyLong, or PyFloat. 
- May return other types if parse_int or parse_float are set - */ - Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); - Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1; - Py_ssize_t idx = start; - int is_float = 0; - PyObject *rval; - PyObject *numstr; - - /* read a sign if it's there, make sure it's not the end of the string */ - if (str[idx] == '-') { - idx++; - if (idx > end_idx) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - } - - /* read as many integer digits as we find as long as it doesn't start with 0 */ - if (str[idx] >= '1' && str[idx] <= '9') { - idx++; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - /* if it starts with 0 we only expect one integer digit */ - else if (str[idx] == '0') { - idx++; - } - /* no integer digits, error */ - else { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - - /* if the next char is '.' followed by a digit then read all float digits */ - if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') { - is_float = 1; - idx += 2; - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - } - - /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ - if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) { - Py_ssize_t e_start = idx; - idx++; - - /* read an exponent sign if present */ - if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++; - - /* read all digits */ - while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; - - /* if we got a digit, then parse as float. if not, backtrack */ - if (str[idx - 1] >= '0' && str[idx - 1] <= '9') { - is_float = 1; - } - else { - idx = e_start; - } - } - - /* copy the section we determined to be a number */ - numstr = PyUnicode_FromUnicode(&str[start], idx - start); - if (numstr == NULL) - return NULL; - if (is_float) { - /* parse as a float using a fast path if available, otherwise call user defined method */ - if (s->parse_float != (PyObject *)&PyFloat_Type) { - rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); - } - else { - rval = PyFloat_FromString(numstr, NULL); - } - } - else { - /* no fast path for unicode -> int, just call */ - rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL); - } - Py_DECREF(numstr); - *next_idx_ptr = idx; - return rval; -} - -static PyObject * -scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read one JSON term (of any kind) from PyString pystr. - idx is the index of the first character of the term - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of the term. 
- */ - char *str = PyString_AS_STRING(pystr); - Py_ssize_t length = PyString_GET_SIZE(pystr); - if (idx >= length) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - switch (str[idx]) { - case '"': - /* string */ - return scanstring_str(pystr, idx + 1, - PyString_AS_STRING(s->encoding), - PyObject_IsTrue(s->strict), - next_idx_ptr); - case '{': - /* object */ - return _parse_object_str(s, pystr, idx + 1, next_idx_ptr); - case '[': - /* array */ - return _parse_array_str(s, pystr, idx + 1, next_idx_ptr); - case 'n': - /* null */ - if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') { - Py_INCREF(Py_None); - *next_idx_ptr = idx + 4; - return Py_None; - } - break; - case 't': - /* true */ - if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') { - Py_INCREF(Py_True); - *next_idx_ptr = idx + 4; - return Py_True; - } - break; - case 'f': - /* false */ - if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') { - Py_INCREF(Py_False); - *next_idx_ptr = idx + 5; - return Py_False; - } - break; - case 'N': - /* NaN */ - if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') { - return _parse_constant(s, "NaN", idx, next_idx_ptr); - } - break; - case 'I': - /* Infinity */ - if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') { - return _parse_constant(s, "Infinity", idx, next_idx_ptr); - } - break; - case '-': - /* -Infinity */ - if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') { - return _parse_constant(s, "-Infinity", idx, next_idx_ptr); - } - break; - } - /* Didn't find a string, object, array, or named constant. Look for a number. */ - return _match_number_str(s, pystr, idx, next_idx_ptr); -} - -static PyObject * -scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) -{ - /* Read one JSON term (of any kind) from PyUnicode pystr. - idx is the index of the first character of the term - *next_idx_ptr is a return-by-reference index to the first character after - the number. - - Returns a new PyObject representation of the term. 
- */ - Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); - Py_ssize_t length = PyUnicode_GET_SIZE(pystr); - if (idx >= length) { - PyErr_SetNone(PyExc_StopIteration); - return NULL; - } - switch (str[idx]) { - case '"': - /* string */ - return scanstring_unicode(pystr, idx + 1, - PyObject_IsTrue(s->strict), - next_idx_ptr); - case '{': - /* object */ - return _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr); - case '[': - /* array */ - return _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr); - case 'n': - /* null */ - if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') { - Py_INCREF(Py_None); - *next_idx_ptr = idx + 4; - return Py_None; - } - break; - case 't': - /* true */ - if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') { - Py_INCREF(Py_True); - *next_idx_ptr = idx + 4; - return Py_True; - } - break; - case 'f': - /* false */ - if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') { - Py_INCREF(Py_False); - *next_idx_ptr = idx + 5; - return Py_False; - } - break; - case 'N': - /* NaN */ - if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') { - return _parse_constant(s, "NaN", idx, next_idx_ptr); - } - break; - case 'I': - /* Infinity */ - if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') { - return _parse_constant(s, "Infinity", idx, next_idx_ptr); - } - break; - case '-': - /* -Infinity */ - if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') { - return _parse_constant(s, "-Infinity", idx, next_idx_ptr); - } - break; - } - /* Didn't find a string, object, array, or named constant. Look for a number. 
*/ - return _match_number_unicode(s, pystr, idx, next_idx_ptr); -} - -static PyObject * -scanner_call(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* Python callable interface to scan_once_{str,unicode} */ - PyObject *pystr; - PyObject *rval; - Py_ssize_t idx; - Py_ssize_t next_idx = -1; - static char *kwlist[] = {"string", "idx", NULL}; - PyScannerObject *s; - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:scan_once", kwlist, &pystr, _convertPyInt_AsSsize_t, &idx)) - return NULL; - - if (PyString_Check(pystr)) { - rval = scan_once_str(s, pystr, idx, &next_idx); - } - else if (PyUnicode_Check(pystr)) { - rval = scan_once_unicode(s, pystr, idx, &next_idx); - } - else { - PyErr_Format(PyExc_TypeError, - "first argument must be a string, not %.80s", - Py_TYPE(pystr)->tp_name); - return NULL; - } - PyDict_Clear(s->memo); - return _build_rval_index_tuple(rval, next_idx); -} - -static PyObject * -scanner_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - PyScannerObject *s; - s = (PyScannerObject *)type->tp_alloc(type, 0); - if (s != NULL) { - s->encoding = NULL; - s->strict = NULL; - s->object_hook = NULL; - s->pairs_hook = NULL; - s->parse_float = NULL; - s->parse_int = NULL; - s->parse_constant = NULL; - } - return (PyObject *)s; -} - -static int -scanner_init(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* Initialize Scanner object */ - PyObject *ctx; - static char *kwlist[] = {"context", NULL}; - PyScannerObject *s; - - assert(PyScanner_Check(self)); - s = (PyScannerObject *)self; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:make_scanner", kwlist, &ctx)) - return -1; - - if (s->memo == NULL) { - s->memo = PyDict_New(); - if (s->memo == NULL) - goto bail; - } - - /* PyString_AS_STRING is used on encoding */ - s->encoding = PyObject_GetAttrString(ctx, "encoding"); - if (s->encoding == NULL) - goto bail; - if (s->encoding == Py_None) { - Py_DECREF(Py_None); - s->encoding = PyString_InternFromString(DEFAULT_ENCODING); - } - else if (PyUnicode_Check(s->encoding)) { - PyObject *tmp = PyUnicode_AsEncodedString(s->encoding, NULL, NULL); - Py_DECREF(s->encoding); - s->encoding = tmp; - } - if (s->encoding == NULL || !PyString_Check(s->encoding)) - goto bail; - - /* All of these will fail "gracefully" so we don't need to verify them */ - s->strict = PyObject_GetAttrString(ctx, "strict"); - if (s->strict == NULL) - goto bail; - s->object_hook = PyObject_GetAttrString(ctx, "object_hook"); - if (s->object_hook == NULL) - goto bail; - s->pairs_hook = PyObject_GetAttrString(ctx, "object_pairs_hook"); - if (s->pairs_hook == NULL) - goto bail; - s->parse_float = PyObject_GetAttrString(ctx, "parse_float"); - if (s->parse_float == NULL) - goto bail; - s->parse_int = PyObject_GetAttrString(ctx, "parse_int"); - if (s->parse_int == NULL) - goto bail; - s->parse_constant = PyObject_GetAttrString(ctx, "parse_constant"); - if (s->parse_constant == NULL) - goto bail; - - return 0; - -bail: - Py_CLEAR(s->encoding); - Py_CLEAR(s->strict); - Py_CLEAR(s->object_hook); - Py_CLEAR(s->pairs_hook); - Py_CLEAR(s->parse_float); - Py_CLEAR(s->parse_int); - Py_CLEAR(s->parse_constant); - return -1; -} - -PyDoc_STRVAR(scanner_doc, "JSON scanner object"); - -static -PyTypeObject PyScannerType = { - PyObject_HEAD_INIT(NULL) - 0, /* tp_internal */ - "simplejson._speedups.Scanner", /* tp_name */ - sizeof(PyScannerObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - scanner_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* 
tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - scanner_call, /* tp_call */ - 0, /* tp_str */ - 0,/* PyObject_GenericGetAttr, */ /* tp_getattro */ - 0,/* PyObject_GenericSetAttr, */ /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - scanner_doc, /* tp_doc */ - scanner_traverse, /* tp_traverse */ - scanner_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - scanner_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - scanner_init, /* tp_init */ - 0,/* PyType_GenericAlloc, */ /* tp_alloc */ - scanner_new, /* tp_new */ - 0,/* PyObject_GC_Del, */ /* tp_free */ -}; - -static PyObject * -encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - PyEncoderObject *s; - s = (PyEncoderObject *)type->tp_alloc(type, 0); - if (s != NULL) { - s->markers = NULL; - s->defaultfn = NULL; - s->encoder = NULL; - s->indent = NULL; - s->key_separator = NULL; - s->item_separator = NULL; - s->sort_keys = NULL; - s->skipkeys = NULL; - s->key_memo = NULL; - } - return (PyObject *)s; -} - -static int -encoder_init(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* initialize Encoder object */ - static char *kwlist[] = {"markers", "default", "encoder", "indent", "key_separator", "item_separator", "sort_keys", "skipkeys", "allow_nan", "key_memo", "use_decimal", NULL}; - - PyEncoderObject *s; - PyObject *markers, *defaultfn, *encoder, *indent, *key_separator; - PyObject *item_separator, *sort_keys, *skipkeys, *allow_nan, *key_memo, *use_decimal; - - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOOOO:make_encoder", kwlist, - &markers, &defaultfn, &encoder, &indent, &key_separator, &item_separator, - &sort_keys, &skipkeys, &allow_nan, &key_memo, &use_decimal)) - return -1; - - s->markers = markers; - s->defaultfn = defaultfn; - s->encoder = encoder; - s->indent = indent; - s->key_separator = key_separator; - s->item_separator = item_separator; - s->sort_keys = sort_keys; - s->skipkeys = skipkeys; - s->key_memo = key_memo; - s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii); - s->allow_nan = PyObject_IsTrue(allow_nan); - s->use_decimal = PyObject_IsTrue(use_decimal); - - Py_INCREF(s->markers); - Py_INCREF(s->defaultfn); - Py_INCREF(s->encoder); - Py_INCREF(s->indent); - Py_INCREF(s->key_separator); - Py_INCREF(s->item_separator); - Py_INCREF(s->sort_keys); - Py_INCREF(s->skipkeys); - Py_INCREF(s->key_memo); - return 0; -} - -static PyObject * -encoder_call(PyObject *self, PyObject *args, PyObject *kwds) -{ - /* Python callable interface to encode_listencode_obj */ - static char *kwlist[] = {"obj", "_current_indent_level", NULL}; - PyObject *obj; - PyObject *rval; - Py_ssize_t indent_level; - PyEncoderObject *s; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:_iterencode", kwlist, - &obj, _convertPyInt_AsSsize_t, &indent_level)) - return NULL; - rval = PyList_New(0); - if (rval == NULL) - return NULL; - if (encoder_listencode_obj(s, rval, obj, indent_level)) { - Py_DECREF(rval); - return NULL; - } - return rval; -} - 
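The Scanner and Encoder objects defined above share a calling convention with the pure-Python code removed later in this patch: a scanner is called as scan_once(string, idx) and returns a (value, end_index) pair, while an encoder is called as _iterencode(obj, _current_indent_level) and produces the string chunks that encode() joins into the final document. A minimal sketch of that convention, assuming only an installed copy of simplejson (the literal '{"a": 1}' is purely illustrative):

    from simplejson.decoder import JSONDecoder
    from simplejson.encoder import JSONEncoder

    decoder = JSONDecoder()
    # scan_once(string, idx) -> (value, index just past the parsed term).
    # raw_decode() is a thin wrapper around this call; the attribute is
    # wired up in JSONDecoder.__init__ via make_scanner(self).
    value, end = decoder.scan_once('{"a": 1}', 0)
    assert value == {'a': 1} and end == 8

    encoder = JSONEncoder()
    # iterencode() yields string chunks; encode() joins them into one string.
    assert ''.join(encoder.iterencode({'a': 1})) == '{"a": 1}'
    assert encoder.encode({'a': 1}) == '{"a": 1}'

Whether the C speedups or the pure-Python fallbacks service these calls is decided at import time by try/except ImportError fallbacks (e.g. _import_c_scanstring() in decoder.py and _import_speedups() in encoder.py), so callers see the same behaviour either way.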
-static PyObject * -_encoded_const(PyObject *obj) -{ - /* Return the JSON string representation of None, True, False */ - if (obj == Py_None) { - static PyObject *s_null = NULL; - if (s_null == NULL) { - s_null = PyString_InternFromString("null"); - } - Py_INCREF(s_null); - return s_null; - } - else if (obj == Py_True) { - static PyObject *s_true = NULL; - if (s_true == NULL) { - s_true = PyString_InternFromString("true"); - } - Py_INCREF(s_true); - return s_true; - } - else if (obj == Py_False) { - static PyObject *s_false = NULL; - if (s_false == NULL) { - s_false = PyString_InternFromString("false"); - } - Py_INCREF(s_false); - return s_false; - } - else { - PyErr_SetString(PyExc_ValueError, "not a const"); - return NULL; - } -} - -static PyObject * -encoder_encode_float(PyEncoderObject *s, PyObject *obj) -{ - /* Return the JSON representation of a PyFloat */ - double i = PyFloat_AS_DOUBLE(obj); - if (!Py_IS_FINITE(i)) { - if (!s->allow_nan) { - PyErr_SetString(PyExc_ValueError, "Out of range float values are not JSON compliant"); - return NULL; - } - if (i > 0) { - return PyString_FromString("Infinity"); - } - else if (i < 0) { - return PyString_FromString("-Infinity"); - } - else { - return PyString_FromString("NaN"); - } - } - /* Use a better float format here? */ - return PyObject_Repr(obj); -} - -static PyObject * -encoder_encode_string(PyEncoderObject *s, PyObject *obj) -{ - /* Return the JSON representation of a string */ - if (s->fast_encode) - return py_encode_basestring_ascii(NULL, obj); - else - return PyObject_CallFunctionObjArgs(s->encoder, obj, NULL); -} - -static int -_steal_list_append(PyObject *lst, PyObject *stolen) -{ - /* Append stolen and then decrement its reference count */ - int rval = PyList_Append(lst, stolen); - Py_DECREF(stolen); - return rval; -} - -static int -encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level) -{ - /* Encode Python object obj to a JSON term, rval is a PyList */ - PyObject *newobj; - int rv; - - if (obj == Py_None || obj == Py_True || obj == Py_False) { - PyObject *cstr = _encoded_const(obj); - if (cstr == NULL) - return -1; - return _steal_list_append(rval, cstr); - } - else if (PyString_Check(obj) || PyUnicode_Check(obj)) - { - PyObject *encoded = encoder_encode_string(s, obj); - if (encoded == NULL) - return -1; - return _steal_list_append(rval, encoded); - } - else if (PyInt_Check(obj) || PyLong_Check(obj)) { - PyObject *encoded = PyObject_Str(obj); - if (encoded == NULL) - return -1; - return _steal_list_append(rval, encoded); - } - else if (PyFloat_Check(obj)) { - PyObject *encoded = encoder_encode_float(s, obj); - if (encoded == NULL) - return -1; - return _steal_list_append(rval, encoded); - } - else if (PyList_Check(obj) || PyTuple_Check(obj)) { - return encoder_listencode_list(s, rval, obj, indent_level); - } - else if (PyDict_Check(obj)) { - return encoder_listencode_dict(s, rval, obj, indent_level); - } - else if (s->use_decimal && Decimal_Check(obj)) { - PyObject *encoded = PyObject_Str(obj); - if (encoded == NULL) - return -1; - return _steal_list_append(rval, encoded); - } - else { - PyObject *ident = NULL; - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(obj); - if (ident == NULL) - return -1; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - Py_DECREF(ident); - return -1; - } - if (PyDict_SetItem(s->markers, ident, obj)) { - Py_DECREF(ident); - 
return -1; - } - } - newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL); - if (newobj == NULL) { - Py_XDECREF(ident); - return -1; - } - rv = encoder_listencode_obj(s, rval, newobj, indent_level); - Py_DECREF(newobj); - if (rv) { - Py_XDECREF(ident); - return -1; - } - if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) { - Py_XDECREF(ident); - return -1; - } - Py_XDECREF(ident); - } - return rv; - } -} - -static int -encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level) -{ - /* Encode Python dict dct a JSON term, rval is a PyList */ - static PyObject *open_dict = NULL; - static PyObject *close_dict = NULL; - static PyObject *empty_dict = NULL; - static PyObject *iteritems = NULL; - PyObject *kstr = NULL; - PyObject *ident = NULL; - PyObject *key, *value; - PyObject *iter = NULL; - PyObject *item = NULL; - PyObject *encoded = NULL; - int skipkeys; - Py_ssize_t idx; - - if (open_dict == NULL || close_dict == NULL || empty_dict == NULL || iteritems == NULL) { - open_dict = PyString_InternFromString("{"); - close_dict = PyString_InternFromString("}"); - empty_dict = PyString_InternFromString("{}"); - iteritems = PyString_InternFromString("iteritems"); - if (open_dict == NULL || close_dict == NULL || empty_dict == NULL || iteritems == NULL) - return -1; - } - if (PyDict_Size(dct) == 0) - return PyList_Append(rval, empty_dict); - - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(dct); - if (ident == NULL) - goto bail; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - goto bail; - } - if (PyDict_SetItem(s->markers, ident, dct)) { - goto bail; - } - } - - if (PyList_Append(rval, open_dict)) - goto bail; - - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level += 1; - /* - newline_indent = '\n' + (_indent * _current_indent_level) - separator = _item_separator + newline_indent - buf += newline_indent - */ - } - - /* TODO: C speedup not implemented for sort_keys */ - - skipkeys = PyObject_IsTrue(s->skipkeys); - idx = 0; - iter = PyObject_CallMethodObjArgs(dct, iteritems, NULL); - if (iter == NULL) - goto bail; - while ((item = PyIter_Next(iter))) { - - key = PyTuple_GetItem(item, 0); - if (key == NULL) - goto bail; - value = PyTuple_GetItem(item, 1); - if (value == NULL) - goto bail; - - encoded = PyDict_GetItem(s->key_memo, key); - if (encoded != NULL) { - Py_INCREF(encoded); - } - else if (PyString_Check(key) || PyUnicode_Check(key)) { - Py_INCREF(key); - kstr = key; - } - else if (PyFloat_Check(key)) { - kstr = encoder_encode_float(s, key); - if (kstr == NULL) - goto bail; - } - else if (PyInt_Check(key) || PyLong_Check(key)) { - kstr = PyObject_Str(key); - if (kstr == NULL) - goto bail; - } - else if (key == Py_True || key == Py_False || key == Py_None) { - kstr = _encoded_const(key); - if (kstr == NULL) - goto bail; - } - else if (skipkeys) { - Py_DECREF(item); - continue; - } - else { - /* TODO: include repr of key */ - PyErr_SetString(PyExc_ValueError, "keys must be a string"); - goto bail; - } - - if (idx) { - if (PyList_Append(rval, s->item_separator)) - goto bail; - } - - if (encoded == NULL) { - encoded = encoder_encode_string(s, kstr); - Py_CLEAR(kstr); - if (encoded == NULL) - goto bail; - if (PyDict_SetItem(s->key_memo, key, encoded)) - goto bail; - } - if (PyList_Append(rval, encoded)) { - goto bail; - } - Py_CLEAR(encoded); - if (PyList_Append(rval, s->key_separator)) - 
goto bail; - if (encoder_listencode_obj(s, rval, value, indent_level)) - goto bail; - Py_CLEAR(item); - idx += 1; - } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; - if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) - goto bail; - Py_CLEAR(ident); - } - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level -= 1; - /* - yield '\n' + (_indent * _current_indent_level) - */ - } - if (PyList_Append(rval, close_dict)) - goto bail; - return 0; - -bail: - Py_XDECREF(encoded); - Py_XDECREF(item); - Py_XDECREF(iter); - Py_XDECREF(kstr); - Py_XDECREF(ident); - return -1; -} - - -static int -encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level) -{ - /* Encode Python list seq to a JSON term, rval is a PyList */ - static PyObject *open_array = NULL; - static PyObject *close_array = NULL; - static PyObject *empty_array = NULL; - PyObject *ident = NULL; - PyObject *iter = NULL; - PyObject *obj = NULL; - int is_true; - int i = 0; - - if (open_array == NULL || close_array == NULL || empty_array == NULL) { - open_array = PyString_InternFromString("["); - close_array = PyString_InternFromString("]"); - empty_array = PyString_InternFromString("[]"); - if (open_array == NULL || close_array == NULL || empty_array == NULL) - return -1; - } - ident = NULL; - is_true = PyObject_IsTrue(seq); - if (is_true == -1) - return -1; - else if (is_true == 0) - return PyList_Append(rval, empty_array); - - if (s->markers != Py_None) { - int has_key; - ident = PyLong_FromVoidPtr(seq); - if (ident == NULL) - goto bail; - has_key = PyDict_Contains(s->markers, ident); - if (has_key) { - if (has_key != -1) - PyErr_SetString(PyExc_ValueError, "Circular reference detected"); - goto bail; - } - if (PyDict_SetItem(s->markers, ident, seq)) { - goto bail; - } - } - - iter = PyObject_GetIter(seq); - if (iter == NULL) - goto bail; - - if (PyList_Append(rval, open_array)) - goto bail; - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level += 1; - /* - newline_indent = '\n' + (_indent * _current_indent_level) - separator = _item_separator + newline_indent - buf += newline_indent - */ - } - while ((obj = PyIter_Next(iter))) { - if (i) { - if (PyList_Append(rval, s->item_separator)) - goto bail; - } - if (encoder_listencode_obj(s, rval, obj, indent_level)) - goto bail; - i++; - Py_CLEAR(obj); - } - Py_CLEAR(iter); - if (PyErr_Occurred()) - goto bail; - if (ident != NULL) { - if (PyDict_DelItem(s->markers, ident)) - goto bail; - Py_CLEAR(ident); - } - if (s->indent != Py_None) { - /* TODO: DOES NOT RUN */ - indent_level -= 1; - /* - yield '\n' + (_indent * _current_indent_level) - */ - } - if (PyList_Append(rval, close_array)) - goto bail; - return 0; - -bail: - Py_XDECREF(obj); - Py_XDECREF(iter); - Py_XDECREF(ident); - return -1; -} - -static void -encoder_dealloc(PyObject *self) -{ - /* Deallocate Encoder */ - encoder_clear(self); - Py_TYPE(self)->tp_free(self); -} - -static int -encoder_traverse(PyObject *self, visitproc visit, void *arg) -{ - PyEncoderObject *s; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - Py_VISIT(s->markers); - Py_VISIT(s->defaultfn); - Py_VISIT(s->encoder); - Py_VISIT(s->indent); - Py_VISIT(s->key_separator); - Py_VISIT(s->item_separator); - Py_VISIT(s->sort_keys); - Py_VISIT(s->skipkeys); - Py_VISIT(s->key_memo); - return 0; -} - -static int -encoder_clear(PyObject *self) -{ - /* Deallocate Encoder */ - PyEncoderObject *s; - assert(PyEncoder_Check(self)); - s = (PyEncoderObject *)self; - 
Py_CLEAR(s->markers); - Py_CLEAR(s->defaultfn); - Py_CLEAR(s->encoder); - Py_CLEAR(s->indent); - Py_CLEAR(s->key_separator); - Py_CLEAR(s->item_separator); - Py_CLEAR(s->sort_keys); - Py_CLEAR(s->skipkeys); - Py_CLEAR(s->key_memo); - return 0; -} - -PyDoc_STRVAR(encoder_doc, "_iterencode(obj, _current_indent_level) -> iterable"); - -static -PyTypeObject PyEncoderType = { - PyObject_HEAD_INIT(NULL) - 0, /* tp_internal */ - "simplejson._speedups.Encoder", /* tp_name */ - sizeof(PyEncoderObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - encoder_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - encoder_call, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - encoder_doc, /* tp_doc */ - encoder_traverse, /* tp_traverse */ - encoder_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - encoder_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - encoder_init, /* tp_init */ - 0, /* tp_alloc */ - encoder_new, /* tp_new */ - 0, /* tp_free */ -}; - -static PyMethodDef speedups_methods[] = { - {"encode_basestring_ascii", - (PyCFunction)py_encode_basestring_ascii, - METH_O, - pydoc_encode_basestring_ascii}, - {"scanstring", - (PyCFunction)py_scanstring, - METH_VARARGS, - pydoc_scanstring}, - {NULL, NULL, 0, NULL} -}; - -PyDoc_STRVAR(module_doc, -"simplejson speedups\n"); - -void -init_speedups(void) -{ - PyObject *m, *decimal; - PyScannerType.tp_new = PyType_GenericNew; - if (PyType_Ready(&PyScannerType) < 0) - return; - PyEncoderType.tp_new = PyType_GenericNew; - if (PyType_Ready(&PyEncoderType) < 0) - return; - - decimal = PyImport_ImportModule("decimal"); - if (decimal == NULL) - return; - DecimalTypePtr = (PyTypeObject*)PyObject_GetAttrString(decimal, "Decimal"); - Py_DECREF(decimal); - if (DecimalTypePtr == NULL) - return; - - m = Py_InitModule3("_speedups", speedups_methods, module_doc); - Py_INCREF((PyObject*)&PyScannerType); - PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType); - Py_INCREF((PyObject*)&PyEncoderType); - PyModule_AddObject(m, "make_encoder", (PyObject*)&PyEncoderType); -} diff --git a/simplejson/decoder.py b/simplejson/decoder.py deleted file mode 100644 index 4cf4015..0000000 --- a/simplejson/decoder.py +++ /dev/null @@ -1,421 +0,0 @@ -"""Implementation of JSONDecoder -""" -import re -import sys -import struct - -from simplejson.scanner import make_scanner -def _import_c_scanstring(): - try: - from simplejson._speedups import scanstring - return scanstring - except ImportError: - return None -c_scanstring = _import_c_scanstring() - -__all__ = ['JSONDecoder'] - -FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL - -def _floatconstants(): - _BYTES = '7FF80000000000007FF0000000000000'.decode('hex') - # The struct module in Python 2.4 would get frexp() out of range here - # when an endian is specified in the format string. 
Fixed in Python 2.5+ - if sys.byteorder != 'big': - _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] - nan, inf = struct.unpack('dd', _BYTES) - return nan, inf, -inf - -NaN, PosInf, NegInf = _floatconstants() - - -class JSONDecodeError(ValueError): - """Subclass of ValueError with the following additional properties: - - msg: The unformatted error message - doc: The JSON document being parsed - pos: The start index of doc where parsing failed - end: The end index of doc where parsing failed (may be None) - lineno: The line corresponding to pos - colno: The column corresponding to pos - endlineno: The line corresponding to end (may be None) - endcolno: The column corresponding to end (may be None) - - """ - def __init__(self, msg, doc, pos, end=None): - ValueError.__init__(self, errmsg(msg, doc, pos, end=end)) - self.msg = msg - self.doc = doc - self.pos = pos - self.end = end - self.lineno, self.colno = linecol(doc, pos) - if end is not None: - self.endlineno, self.endcolno = linecol(doc, pos) - else: - self.endlineno, self.endcolno = None, None - - -def linecol(doc, pos): - lineno = doc.count('\n', 0, pos) + 1 - if lineno == 1: - colno = pos - else: - colno = pos - doc.rindex('\n', 0, pos) - return lineno, colno - - -def errmsg(msg, doc, pos, end=None): - # Note that this function is called from _speedups - lineno, colno = linecol(doc, pos) - if end is None: - #fmt = '{0}: line {1} column {2} (char {3})' - #return fmt.format(msg, lineno, colno, pos) - fmt = '%s: line %d column %d (char %d)' - return fmt % (msg, lineno, colno, pos) - endlineno, endcolno = linecol(doc, end) - #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})' - #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end) - fmt = '%s: line %d column %d - line %d column %d (char %d - %d)' - return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end) - - -_CONSTANTS = { - '-Infinity': NegInf, - 'Infinity': PosInf, - 'NaN': NaN, -} - -STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS) -BACKSLASH = { - '"': u'"', '\\': u'\\', '/': u'/', - 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t', -} - -DEFAULT_ENCODING = "utf-8" - -def py_scanstring(s, end, encoding=None, strict=True, - _b=BACKSLASH, _m=STRINGCHUNK.match): - """Scan the string s for a JSON string. End is the index of the - character in s after the quote that started the JSON string. - Unescapes all valid JSON string escape sequences and raises ValueError - on attempt to decode an invalid string. If strict is False then literal - control characters are allowed in the string. 
- - Returns a tuple of the decoded string and the index of the character in s - after the end quote.""" - if encoding is None: - encoding = DEFAULT_ENCODING - chunks = [] - _append = chunks.append - begin = end - 1 - while 1: - chunk = _m(s, end) - if chunk is None: - raise JSONDecodeError( - "Unterminated string starting at", s, begin) - end = chunk.end() - content, terminator = chunk.groups() - # Content is contains zero or more unescaped string characters - if content: - if not isinstance(content, unicode): - content = unicode(content, encoding) - _append(content) - # Terminator is the end of string, a literal control character, - # or a backslash denoting that an escape sequence follows - if terminator == '"': - break - elif terminator != '\\': - if strict: - msg = "Invalid control character %r at" % (terminator,) - #msg = "Invalid control character {0!r} at".format(terminator) - raise JSONDecodeError(msg, s, end) - else: - _append(terminator) - continue - try: - esc = s[end] - except IndexError: - raise JSONDecodeError( - "Unterminated string starting at", s, begin) - # If not a unicode escape sequence, must be in the lookup table - if esc != 'u': - try: - char = _b[esc] - except KeyError: - msg = "Invalid \\escape: " + repr(esc) - raise JSONDecodeError(msg, s, end) - end += 1 - else: - # Unicode escape sequence - esc = s[end + 1:end + 5] - next_end = end + 5 - if len(esc) != 4: - msg = "Invalid \\uXXXX escape" - raise JSONDecodeError(msg, s, end) - uni = int(esc, 16) - # Check for surrogate pair on UCS-4 systems - if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535: - msg = "Invalid \\uXXXX\\uXXXX surrogate pair" - if not s[end + 5:end + 7] == '\\u': - raise JSONDecodeError(msg, s, end) - esc2 = s[end + 7:end + 11] - if len(esc2) != 4: - raise JSONDecodeError(msg, s, end) - uni2 = int(esc2, 16) - uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00)) - next_end += 6 - char = unichr(uni) - end = next_end - # Append the unescaped character - _append(char) - return u''.join(chunks), end - - -# Use speedup if available -scanstring = c_scanstring or py_scanstring - -WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS) -WHITESPACE_STR = ' \t\n\r' - -def JSONObject((s, end), encoding, strict, scan_once, object_hook, - object_pairs_hook, memo=None, - _w=WHITESPACE.match, _ws=WHITESPACE_STR): - # Backwards compatibility - if memo is None: - memo = {} - memo_get = memo.setdefault - pairs = [] - # Use a slice to prevent IndexError from being raised, the following - # check will raise a more specific ValueError if the string is empty - nextchar = s[end:end + 1] - # Normally we expect nextchar == '"' - if nextchar != '"': - if nextchar in _ws: - end = _w(s, end).end() - nextchar = s[end:end + 1] - # Trivial empty object - if nextchar == '}': - if object_pairs_hook is not None: - result = object_pairs_hook(pairs) - return result, end - pairs = {} - if object_hook is not None: - pairs = object_hook(pairs) - return pairs, end + 1 - elif nextchar != '"': - raise JSONDecodeError("Expecting property name", s, end) - end += 1 - while True: - key, end = scanstring(s, end, encoding, strict) - key = memo_get(key, key) - - # To skip some function call overhead we optimize the fast paths where - # the JSON key separator is ": " or just ":". 
- if s[end:end + 1] != ':': - end = _w(s, end).end() - if s[end:end + 1] != ':': - raise JSONDecodeError("Expecting : delimiter", s, end) - - end += 1 - - try: - if s[end] in _ws: - end += 1 - if s[end] in _ws: - end = _w(s, end + 1).end() - except IndexError: - pass - - try: - value, end = scan_once(s, end) - except StopIteration: - raise JSONDecodeError("Expecting object", s, end) - pairs.append((key, value)) - - try: - nextchar = s[end] - if nextchar in _ws: - end = _w(s, end + 1).end() - nextchar = s[end] - except IndexError: - nextchar = '' - end += 1 - - if nextchar == '}': - break - elif nextchar != ',': - raise JSONDecodeError("Expecting , delimiter", s, end - 1) - - try: - nextchar = s[end] - if nextchar in _ws: - end += 1 - nextchar = s[end] - if nextchar in _ws: - end = _w(s, end + 1).end() - nextchar = s[end] - except IndexError: - nextchar = '' - - end += 1 - if nextchar != '"': - raise JSONDecodeError("Expecting property name", s, end - 1) - - if object_pairs_hook is not None: - result = object_pairs_hook(pairs) - return result, end - pairs = dict(pairs) - if object_hook is not None: - pairs = object_hook(pairs) - return pairs, end - -def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): - values = [] - nextchar = s[end:end + 1] - if nextchar in _ws: - end = _w(s, end + 1).end() - nextchar = s[end:end + 1] - # Look-ahead for trivial empty array - if nextchar == ']': - return values, end + 1 - _append = values.append - while True: - try: - value, end = scan_once(s, end) - except StopIteration: - raise JSONDecodeError("Expecting object", s, end) - _append(value) - nextchar = s[end:end + 1] - if nextchar in _ws: - end = _w(s, end + 1).end() - nextchar = s[end:end + 1] - end += 1 - if nextchar == ']': - break - elif nextchar != ',': - raise JSONDecodeError("Expecting , delimiter", s, end) - - try: - if s[end] in _ws: - end += 1 - if s[end] in _ws: - end = _w(s, end + 1).end() - except IndexError: - pass - - return values, end - -class JSONDecoder(object): - """Simple JSON decoder - - Performs the following translations in decoding by default: - - +---------------+-------------------+ - | JSON | Python | - +===============+===================+ - | object | dict | - +---------------+-------------------+ - | array | list | - +---------------+-------------------+ - | string | unicode | - +---------------+-------------------+ - | number (int) | int, long | - +---------------+-------------------+ - | number (real) | float | - +---------------+-------------------+ - | true | True | - +---------------+-------------------+ - | false | False | - +---------------+-------------------+ - | null | None | - +---------------+-------------------+ - - It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as - their corresponding ``float`` values, which is outside the JSON spec. - - """ - - def __init__(self, encoding=None, object_hook=None, parse_float=None, - parse_int=None, parse_constant=None, strict=True, - object_pairs_hook=None): - """ - *encoding* determines the encoding used to interpret any - :class:`str` objects decoded by this instance (``'utf-8'`` by - default). It has no effect when decoding :class:`unicode` objects. - - Note that currently only encodings that are a superset of ASCII work, - strings of other encodings should be passed in as :class:`unicode`. - - *object_hook*, if specified, will be called with the result of every - JSON object decoded and its return value will be used in place of the - given :class:`dict`. 
This can be used to provide custom - deserializations (e.g. to support JSON-RPC class hinting). - - *object_pairs_hook* is an optional function that will be called with - the result of any object literal decode with an ordered list of pairs. - The return value of *object_pairs_hook* will be used instead of the - :class:`dict`. This feature can be used to implement custom decoders - that rely on the order that the key and value pairs are decoded (for - example, :func:`collections.OrderedDict` will remember the order of - insertion). If *object_hook* is also defined, the *object_pairs_hook* - takes priority. - - *parse_float*, if specified, will be called with the string of every - JSON float to be decoded. By default, this is equivalent to - ``float(num_str)``. This can be used to use another datatype or parser - for JSON floats (e.g. :class:`decimal.Decimal`). - - *parse_int*, if specified, will be called with the string of every - JSON int to be decoded. By default, this is equivalent to - ``int(num_str)``. This can be used to use another datatype or parser - for JSON integers (e.g. :class:`float`). - - *parse_constant*, if specified, will be called with one of the - following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This - can be used to raise an exception if invalid JSON numbers are - encountered. - - *strict* controls the parser's behavior when it encounters an - invalid control character in a string. The default setting of - ``True`` means that unescaped control characters are parse errors, if - ``False`` then control characters will be allowed in strings. - - """ - self.encoding = encoding - self.object_hook = object_hook - self.object_pairs_hook = object_pairs_hook - self.parse_float = parse_float or float - self.parse_int = parse_int or int - self.parse_constant = parse_constant or _CONSTANTS.__getitem__ - self.strict = strict - self.parse_object = JSONObject - self.parse_array = JSONArray - self.parse_string = scanstring - self.memo = {} - self.scan_once = make_scanner(self) - - def decode(self, s, _w=WHITESPACE.match): - """Return the Python representation of ``s`` (a ``str`` or ``unicode`` - instance containing a JSON document) - - """ - obj, end = self.raw_decode(s, idx=_w(s, 0).end()) - end = _w(s, end).end() - if end != len(s): - raise JSONDecodeError("Extra data", s, end, len(s)) - return obj - - def raw_decode(self, s, idx=0): - """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` - beginning with a JSON document) and return a 2-tuple of the Python - representation and the index in ``s`` where the document ended. - - This can be used to decode a JSON document from a string that may - have extraneous data at the end. 
- - """ - try: - obj, end = self.scan_once(s, idx) - except StopIteration: - raise JSONDecodeError("No JSON object could be decoded", s, idx) - return obj, end diff --git a/simplejson/encoder.py b/simplejson/encoder.py deleted file mode 100644 index cab8456..0000000 --- a/simplejson/encoder.py +++ /dev/null @@ -1,501 +0,0 @@ -"""Implementation of JSONEncoder -""" -import re -from decimal import Decimal - -def _import_speedups(): - try: - from simplejson import _speedups - return _speedups.encode_basestring_ascii, _speedups.make_encoder - except ImportError: - return None, None -c_encode_basestring_ascii, c_make_encoder = _import_speedups() - -from simplejson.decoder import PosInf - -ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]') -ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') -HAS_UTF8 = re.compile(r'[\x80-\xff]') -ESCAPE_DCT = { - '\\': '\\\\', - '"': '\\"', - '\b': '\\b', - '\f': '\\f', - '\n': '\\n', - '\r': '\\r', - '\t': '\\t', -} -for i in range(0x20): - #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) - ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) - -FLOAT_REPR = repr - -def encode_basestring(s): - """Return a JSON representation of a Python string - - """ - if isinstance(s, str) and HAS_UTF8.search(s) is not None: - s = s.decode('utf-8') - def replace(match): - return ESCAPE_DCT[match.group(0)] - return u'"' + ESCAPE.sub(replace, s) + u'"' - - -def py_encode_basestring_ascii(s): - """Return an ASCII-only JSON representation of a Python string - - """ - if isinstance(s, str) and HAS_UTF8.search(s) is not None: - s = s.decode('utf-8') - def replace(match): - s = match.group(0) - try: - return ESCAPE_DCT[s] - except KeyError: - n = ord(s) - if n < 0x10000: - #return '\\u{0:04x}'.format(n) - return '\\u%04x' % (n,) - else: - # surrogate pair - n -= 0x10000 - s1 = 0xd800 | ((n >> 10) & 0x3ff) - s2 = 0xdc00 | (n & 0x3ff) - #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2) - return '\\u%04x\\u%04x' % (s1, s2) - return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' - - -encode_basestring_ascii = ( - c_encode_basestring_ascii or py_encode_basestring_ascii) - -class JSONEncoder(object): - """Extensible JSON encoder for Python data structures. - - Supports the following objects and types by default: - - +-------------------+---------------+ - | Python | JSON | - +===================+===============+ - | dict | object | - +-------------------+---------------+ - | list, tuple | array | - +-------------------+---------------+ - | str, unicode | string | - +-------------------+---------------+ - | int, long, float | number | - +-------------------+---------------+ - | True | true | - +-------------------+---------------+ - | False | false | - +-------------------+---------------+ - | None | null | - +-------------------+---------------+ - - To extend this to recognize other objects, subclass and implement a - ``.default()`` method with another method that returns a serializable - object for ``o`` if possible, otherwise it should call the superclass - implementation (to raise ``TypeError``). - - """ - item_separator = ', ' - key_separator = ': ' - def __init__(self, skipkeys=False, ensure_ascii=True, - check_circular=True, allow_nan=True, sort_keys=False, - indent=None, separators=None, encoding='utf-8', default=None, - use_decimal=False): - """Constructor for JSONEncoder, with sensible defaults. - - If skipkeys is false, then it is a TypeError to attempt - encoding of keys that are not str, int, long, float or None. If - skipkeys is True, such items are simply skipped. 
- - If ensure_ascii is true, the output is guaranteed to be str - objects with all incoming unicode characters escaped. If - ensure_ascii is false, the output will be unicode object. - - If check_circular is true, then lists, dicts, and custom encoded - objects will be checked for circular references during encoding to - prevent an infinite recursion (which would cause an OverflowError). - Otherwise, no such check takes place. - - If allow_nan is true, then NaN, Infinity, and -Infinity will be - encoded as such. This behavior is not JSON specification compliant, - but is consistent with most JavaScript based encoders and decoders. - Otherwise, it will be a ValueError to encode such floats. - - If sort_keys is true, then the output of dictionaries will be - sorted by key; this is useful for regression tests to ensure - that JSON serializations can be compared on a day-to-day basis. - - If indent is a string, then JSON array elements and object members - will be pretty-printed with a newline followed by that string repeated - for each level of nesting. ``None`` (the default) selects the most compact - representation without any newlines. For backwards compatibility with - versions of simplejson earlier than 2.1.0, an integer is also accepted - and is converted to a string with that many spaces. - - If specified, separators should be a (item_separator, key_separator) - tuple. The default is (', ', ': '). To get the most compact JSON - representation you should specify (',', ':') to eliminate whitespace. - - If specified, default is a function that gets called for objects - that can't otherwise be serialized. It should return a JSON encodable - version of the object or raise a ``TypeError``. - - If encoding is not None, then all input strings will be - transformed into unicode using that encoding prior to JSON-encoding. - The default is UTF-8. - - If use_decimal is true (not the default), ``decimal.Decimal`` will - be supported directly by the encoder. For the inverse, decode JSON - with ``parse_float=decimal.Decimal``. - - """ - - self.skipkeys = skipkeys - self.ensure_ascii = ensure_ascii - self.check_circular = check_circular - self.allow_nan = allow_nan - self.sort_keys = sort_keys - self.use_decimal = use_decimal - if isinstance(indent, (int, long)): - indent = ' ' * indent - self.indent = indent - if separators is not None: - self.item_separator, self.key_separator = separators - if default is not None: - self.default = default - self.encoding = encoding - - def default(self, o): - """Implement this method in a subclass such that it returns - a serializable object for ``o``, or calls the base implementation - (to raise a ``TypeError``). - - For example, to support arbitrary iterators, you could - implement default like this:: - - def default(self, o): - try: - iterable = iter(o) - except TypeError: - pass - else: - return list(iterable) - return JSONEncoder.default(self, o) - - """ - raise TypeError(repr(o) + " is not JSON serializable") - - def encode(self, o): - """Return a JSON string representation of a Python data structure. - - >>> from simplejson import JSONEncoder - >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) - '{"foo": ["bar", "baz"]}' - - """ - # This is for extremely simple cases and benchmarks. 
- if isinstance(o, basestring): - if isinstance(o, str): - _encoding = self.encoding - if (_encoding is not None - and not (_encoding == 'utf-8')): - o = o.decode(_encoding) - if self.ensure_ascii: - return encode_basestring_ascii(o) - else: - return encode_basestring(o) - # This doesn't pass the iterator directly to ''.join() because the - # exceptions aren't as detailed. The list call should be roughly - # equivalent to the PySequence_Fast that ''.join() would do. - chunks = self.iterencode(o, _one_shot=True) - if not isinstance(chunks, (list, tuple)): - chunks = list(chunks) - if self.ensure_ascii: - return ''.join(chunks) - else: - return u''.join(chunks) - - def iterencode(self, o, _one_shot=False): - """Encode the given object and yield each string - representation as available. - - For example:: - - for chunk in JSONEncoder().iterencode(bigobject): - mysocket.write(chunk) - - """ - if self.check_circular: - markers = {} - else: - markers = None - if self.ensure_ascii: - _encoder = encode_basestring_ascii - else: - _encoder = encode_basestring - if self.encoding != 'utf-8': - def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): - if isinstance(o, str): - o = o.decode(_encoding) - return _orig_encoder(o) - - def floatstr(o, allow_nan=self.allow_nan, - _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf): - # Check for specials. Note that this type of test is processor - # and/or platform-specific, so do tests which don't depend on - # the internals. - - if o != o: - text = 'NaN' - elif o == _inf: - text = 'Infinity' - elif o == _neginf: - text = '-Infinity' - else: - return _repr(o) - - if not allow_nan: - raise ValueError( - "Out of range float values are not JSON compliant: " + - repr(o)) - - return text - - - key_memo = {} - if (_one_shot and c_make_encoder is not None - and not self.indent and not self.sort_keys): - _iterencode = c_make_encoder( - markers, self.default, _encoder, self.indent, - self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, self.allow_nan, key_memo, self.use_decimal) - else: - _iterencode = _make_iterencode( - markers, self.default, _encoder, self.indent, floatstr, - self.key_separator, self.item_separator, self.sort_keys, - self.skipkeys, _one_shot, self.use_decimal) - try: - return _iterencode(o, 0) - finally: - key_memo.clear() - - -class JSONEncoderForHTML(JSONEncoder): - """An encoder that produces JSON safe to embed in HTML. - - To embed JSON content in, say, a script tag on a web page, the - characters &, < and > should be escaped. They cannot be escaped - with the usual entities (e.g. 
&) because they are not expanded - within ' - self.assertEqual( - r'"\u003c/script\u003e\u003cscript\u003e' - r'alert(\"gotcha\")\u003c/script\u003e"', - self.encoder.encode(bad_string)) - self.assertEqual( - bad_string, self.decoder.decode( - self.encoder.encode(bad_string))) diff --git a/simplejson/tests/test_fail.py b/simplejson/tests/test_fail.py deleted file mode 100644 index 646c0f4..0000000 --- a/simplejson/tests/test_fail.py +++ /dev/null @@ -1,91 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -# Fri Dec 30 18:57:26 2005 -JSONDOCS = [ - # http://json.org/JSON_checker/test/fail1.json - '"A JSON payload should be an object or array, not a string."', - # http://json.org/JSON_checker/test/fail2.json - '["Unclosed array"', - # http://json.org/JSON_checker/test/fail3.json - '{unquoted_key: "keys must be quoted}', - # http://json.org/JSON_checker/test/fail4.json - '["extra comma",]', - # http://json.org/JSON_checker/test/fail5.json - '["double extra comma",,]', - # http://json.org/JSON_checker/test/fail6.json - '[ , "<-- missing value"]', - # http://json.org/JSON_checker/test/fail7.json - '["Comma after the close"],', - # http://json.org/JSON_checker/test/fail8.json - '["Extra close"]]', - # http://json.org/JSON_checker/test/fail9.json - '{"Extra comma": true,}', - # http://json.org/JSON_checker/test/fail10.json - '{"Extra value after close": true} "misplaced quoted value"', - # http://json.org/JSON_checker/test/fail11.json - '{"Illegal expression": 1 + 2}', - # http://json.org/JSON_checker/test/fail12.json - '{"Illegal invocation": alert()}', - # http://json.org/JSON_checker/test/fail13.json - '{"Numbers cannot have leading zeroes": 013}', - # http://json.org/JSON_checker/test/fail14.json - '{"Numbers cannot be hex": 0x14}', - # http://json.org/JSON_checker/test/fail15.json - '["Illegal backslash escape: \\x15"]', - # http://json.org/JSON_checker/test/fail16.json - '["Illegal backslash escape: \\\'"]', - # http://json.org/JSON_checker/test/fail17.json - '["Illegal backslash escape: \\017"]', - # http://json.org/JSON_checker/test/fail18.json - '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', - # http://json.org/JSON_checker/test/fail19.json - '{"Missing colon" null}', - # http://json.org/JSON_checker/test/fail20.json - '{"Double colon":: null}', - # http://json.org/JSON_checker/test/fail21.json - '{"Comma instead of colon", null}', - # http://json.org/JSON_checker/test/fail22.json - '["Colon instead of comma": false]', - # http://json.org/JSON_checker/test/fail23.json - '["Bad value", truth]', - # http://json.org/JSON_checker/test/fail24.json - "['single quote']", - # http://code.google.com/p/simplejson/issues/detail?id=3 - u'["A\u001FZ control characters in string"]', -] - -SKIPS = { - 1: "why not have a string payload?", - 18: "spec doesn't specify any nesting limitations", -} - -class TestFail(TestCase): - def test_failures(self): - for idx, doc in enumerate(JSONDOCS): - idx = idx + 1 - if idx in SKIPS: - json.loads(doc) - continue - try: - json.loads(doc) - except json.JSONDecodeError: - pass - else: - #self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc)) - self.fail("Expected failure for fail%d.json: %r" % (idx, doc)) - - def test_array_decoder_issue46(self): - # http://code.google.com/p/simplejson/issues/detail?id=46 - for doc in [u'[,]', '[,]']: - try: - json.loads(doc) - except json.JSONDecodeError, e: - self.assertEquals(e.pos, 1) - self.assertEquals(e.lineno, 1) - self.assertEquals(e.colno, 1) - except Exception, e: - 
self.fail("Unexpected exception raised %r %s" % (e, e)) - else: - self.fail("Unexpected success parsing '[,]'") \ No newline at end of file diff --git a/simplejson/tests/test_float.py b/simplejson/tests/test_float.py deleted file mode 100644 index 94502c6..0000000 --- a/simplejson/tests/test_float.py +++ /dev/null @@ -1,19 +0,0 @@ -import math -from unittest import TestCase - -import simplejson as json - -class TestFloat(TestCase): - def test_floats(self): - for num in [1617161771.7650001, math.pi, math.pi**100, - math.pi**-100, 3.1]: - self.assertEquals(float(json.dumps(num)), num) - self.assertEquals(json.loads(json.dumps(num)), num) - self.assertEquals(json.loads(unicode(json.dumps(num))), num) - - def test_ints(self): - for num in [1, 1L, 1<<32, 1<<64]: - self.assertEquals(json.dumps(num), str(num)) - self.assertEquals(int(json.dumps(num)), num) - self.assertEquals(json.loads(json.dumps(num)), num) - self.assertEquals(json.loads(unicode(json.dumps(num))), num) diff --git a/simplejson/tests/test_indent.py b/simplejson/tests/test_indent.py deleted file mode 100644 index 985831b..0000000 --- a/simplejson/tests/test_indent.py +++ /dev/null @@ -1,53 +0,0 @@ -from unittest import TestCase - -import simplejson as json -import textwrap - -class TestIndent(TestCase): - def test_indent(self): - h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', - 'i-vhbjkhnth', - {'nifty': 87}, {'field': 'yes', 'morefield': False} ] - - expect = textwrap.dedent("""\ - [ - \t[ - \t\t"blorpie" - \t], - \t[ - \t\t"whoops" - \t], - \t[], - \t"d-shtaeou", - \t"d-nthiouh", - \t"i-vhbjkhnth", - \t{ - \t\t"nifty": 87 - \t}, - \t{ - \t\t"field": "yes", - \t\t"morefield": false - \t} - ]""") - - - d1 = json.dumps(h) - d2 = json.dumps(h, indent='\t', sort_keys=True, separators=(',', ': ')) - d3 = json.dumps(h, indent=' ', sort_keys=True, separators=(',', ': ')) - d4 = json.dumps(h, indent=2, sort_keys=True, separators=(',', ': ')) - - h1 = json.loads(d1) - h2 = json.loads(d2) - h3 = json.loads(d3) - h4 = json.loads(d4) - - self.assertEquals(h1, h) - self.assertEquals(h2, h) - self.assertEquals(h3, h) - self.assertEquals(h4, h) - self.assertEquals(d3, expect.replace('\t', ' ')) - self.assertEquals(d4, expect.replace('\t', ' ')) - # NOTE: Python 2.4 textwrap.dedent converts tabs to spaces, - # so the following is expected to fail. Python 2.4 is not a - # supported platform in simplejson 2.1.0+. - self.assertEquals(d2, expect) diff --git a/simplejson/tests/test_pass1.py b/simplejson/tests/test_pass1.py deleted file mode 100644 index c3d6302..0000000 --- a/simplejson/tests/test_pass1.py +++ /dev/null @@ -1,76 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -# from http://json.org/JSON_checker/test/pass1.json -JSON = r''' -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E666, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ], - "compact": [1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066 - - -,"rosebud"] -''' - -class TestPass1(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEquals(res, json.loads(out)) - try: - json.dumps(res, allow_nan=False) - except ValueError: - pass - else: - self.fail("23456789012E666 should be out of range") diff --git a/simplejson/tests/test_pass2.py b/simplejson/tests/test_pass2.py deleted file mode 100644 index de4ee00..0000000 --- a/simplejson/tests/test_pass2.py +++ /dev/null @@ -1,14 +0,0 @@ -from unittest import TestCase -import simplejson as json - -# from http://json.org/JSON_checker/test/pass2.json -JSON = r''' -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] -''' - -class TestPass2(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEquals(res, json.loads(out)) diff --git a/simplejson/tests/test_pass3.py b/simplejson/tests/test_pass3.py deleted file mode 100644 index f591aba..0000000 --- a/simplejson/tests/test_pass3.py +++ /dev/null @@ -1,20 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -# from http://json.org/JSON_checker/test/pass3.json -JSON = r''' -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} -''' - -class TestPass3(TestCase): - def test_parse(self): - # test in/out equivalence and parsing - res = json.loads(JSON) - out = json.dumps(res) - self.assertEquals(res, json.loads(out)) diff --git a/simplejson/tests/test_recursion.py b/simplejson/tests/test_recursion.py deleted file mode 100644 index 97422a6..0000000 --- a/simplejson/tests/test_recursion.py +++ /dev/null @@ -1,67 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -class JSONTestObject: - pass - - -class RecursiveJSONEncoder(json.JSONEncoder): - recurse = False - def default(self, o): - if o is JSONTestObject: - if self.recurse: - return [JSONTestObject] - else: - return 'JSONTestObject' - return json.JSONEncoder.default(o) - - -class TestRecursion(TestCase): - def test_listrecursion(self): - x = [] - x.append(x) - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on list recursion") - x = [] - y = [x] - x.append(y) - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on alternating list recursion") - y = [] - x = [y, y] - # ensure that the marker is cleared - json.dumps(x) - - def test_dictrecursion(self): - x = {} - x["test"] = x - try: - json.dumps(x) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on dict recursion") - x = {} - y = {"a": x, "b": x} - # ensure that the marker is cleared - json.dumps(x) - - def test_defaultrecursion(self): - enc = RecursiveJSONEncoder() - self.assertEquals(enc.encode(JSONTestObject), '"JSONTestObject"') - enc.recurse = True - try: - enc.encode(JSONTestObject) - except ValueError: - pass - else: - self.fail("didn't raise ValueError on default recursion") diff --git a/simplejson/tests/test_scanstring.py 
b/simplejson/tests/test_scanstring.py deleted file mode 100644 index a7fcd46..0000000 --- a/simplejson/tests/test_scanstring.py +++ /dev/null @@ -1,117 +0,0 @@ -import sys -from unittest import TestCase - -import simplejson as json -import simplejson.decoder - -class TestScanString(TestCase): - def test_py_scanstring(self): - self._test_scanstring(simplejson.decoder.py_scanstring) - - def test_c_scanstring(self): - if not simplejson.decoder.c_scanstring: - return - self._test_scanstring(simplejson.decoder.c_scanstring) - - def _test_scanstring(self, scanstring): - self.assertEquals( - scanstring('"z\\ud834\\udd20x"', 1, None, True), - (u'z\U0001d120x', 16)) - - if sys.maxunicode == 65535: - self.assertEquals( - scanstring(u'"z\U0001d120x"', 1, None, True), - (u'z\U0001d120x', 6)) - else: - self.assertEquals( - scanstring(u'"z\U0001d120x"', 1, None, True), - (u'z\U0001d120x', 5)) - - self.assertEquals( - scanstring('"\\u007b"', 1, None, True), - (u'{', 8)) - - self.assertEquals( - scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True), - (u'A JSON payload should be an object or array, not a string.', 60)) - - self.assertEquals( - scanstring('["Unclosed array"', 2, None, True), - (u'Unclosed array', 17)) - - self.assertEquals( - scanstring('["extra comma",]', 2, None, True), - (u'extra comma', 14)) - - self.assertEquals( - scanstring('["double extra comma",,]', 2, None, True), - (u'double extra comma', 21)) - - self.assertEquals( - scanstring('["Comma after the close"],', 2, None, True), - (u'Comma after the close', 24)) - - self.assertEquals( - scanstring('["Extra close"]]', 2, None, True), - (u'Extra close', 14)) - - self.assertEquals( - scanstring('{"Extra comma": true,}', 2, None, True), - (u'Extra comma', 14)) - - self.assertEquals( - scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True), - (u'Extra value after close', 26)) - - self.assertEquals( - scanstring('{"Illegal expression": 1 + 2}', 2, None, True), - (u'Illegal expression', 21)) - - self.assertEquals( - scanstring('{"Illegal invocation": alert()}', 2, None, True), - (u'Illegal invocation', 21)) - - self.assertEquals( - scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True), - (u'Numbers cannot have leading zeroes', 37)) - - self.assertEquals( - scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True), - (u'Numbers cannot be hex', 24)) - - self.assertEquals( - scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True), - (u'Too deep', 30)) - - self.assertEquals( - scanstring('{"Missing colon" null}', 2, None, True), - (u'Missing colon', 16)) - - self.assertEquals( - scanstring('{"Double colon":: null}', 2, None, True), - (u'Double colon', 15)) - - self.assertEquals( - scanstring('{"Comma instead of colon", null}', 2, None, True), - (u'Comma instead of colon', 25)) - - self.assertEquals( - scanstring('["Colon instead of comma": false]', 2, None, True), - (u'Colon instead of comma', 25)) - - self.assertEquals( - scanstring('["Bad value", truth]', 2, None, True), - (u'Bad value', 12)) - - def test_issue3623(self): - self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1, - "xxx") - self.assertRaises(UnicodeDecodeError, - json.encoder.encode_basestring_ascii, "xx\xff") - - def test_overflow(self): - # Python 2.5 does not have maxsize - maxsize = getattr(sys, 'maxsize', sys.maxint) - self.assertRaises(OverflowError, json.decoder.scanstring, "xxx", - maxsize + 1) - diff --git a/simplejson/tests/test_separators.py 
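As an illustrative aside (not from the patch): the low-level scanstring helper exercised by the tests deleted above is handed the index just past a string's opening quote and returns the decoded text together with the index one past the closing quote. A short sketch, assuming simplejson's decoder module of this era:

    import simplejson.decoder

    scanstring = simplejson.decoder.py_scanstring

    # '"\\u007b"' is 8 characters long; decoding starts after the quote at index 0.
    assert scanstring('"\\u007b"', 1, None, True) == (u'{', 8)

    # A surrogate pair is combined into a single code point.
    assert scanstring('"z\\ud834\\udd20x"', 1, None, True) == (u'z\U0001d120x', 16)
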
b/simplejson/tests/test_separators.py deleted file mode 100644 index cbda93c..0000000 --- a/simplejson/tests/test_separators.py +++ /dev/null @@ -1,42 +0,0 @@ -import textwrap -from unittest import TestCase - -import simplejson as json - - -class TestSeparators(TestCase): - def test_separators(self): - h = [['blorpie'], ['whoops'], [], 'd-shtaeou', 'd-nthiouh', 'i-vhbjkhnth', - {'nifty': 87}, {'field': 'yes', 'morefield': False} ] - - expect = textwrap.dedent("""\ - [ - [ - "blorpie" - ] , - [ - "whoops" - ] , - [] , - "d-shtaeou" , - "d-nthiouh" , - "i-vhbjkhnth" , - { - "nifty" : 87 - } , - { - "field" : "yes" , - "morefield" : false - } - ]""") - - - d1 = json.dumps(h) - d2 = json.dumps(h, indent=' ', sort_keys=True, separators=(' ,', ' : ')) - - h1 = json.loads(d1) - h2 = json.loads(d2) - - self.assertEquals(h1, h) - self.assertEquals(h2, h) - self.assertEquals(d2, expect) diff --git a/simplejson/tests/test_speedups.py b/simplejson/tests/test_speedups.py deleted file mode 100644 index 4bf0875..0000000 --- a/simplejson/tests/test_speedups.py +++ /dev/null @@ -1,21 +0,0 @@ -import decimal -from unittest import TestCase - -from simplejson import decoder, encoder, scanner - -def has_speedups(): - return encoder.c_make_encoder is not None - -class TestDecode(TestCase): - def test_make_scanner(self): - if not has_speedups(): - return - self.assertRaises(AttributeError, scanner.c_make_scanner, 1) - - def test_make_encoder(self): - if not has_speedups(): - return - self.assertRaises(TypeError, encoder.c_make_encoder, - None, - "\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75", - None) diff --git a/simplejson/tests/test_unicode.py b/simplejson/tests/test_unicode.py deleted file mode 100644 index f73e5bf..0000000 --- a/simplejson/tests/test_unicode.py +++ /dev/null @@ -1,99 +0,0 @@ -from unittest import TestCase - -import simplejson as json - -class TestUnicode(TestCase): - def test_encoding1(self): - encoder = json.JSONEncoder(encoding='utf-8') - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - s = u.encode('utf-8') - ju = encoder.encode(u) - js = encoder.encode(s) - self.assertEquals(ju, js) - - def test_encoding2(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - s = u.encode('utf-8') - ju = json.dumps(u, encoding='utf-8') - js = json.dumps(s, encoding='utf-8') - self.assertEquals(ju, js) - - def test_encoding3(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps(u) - self.assertEquals(j, '"\\u03b1\\u03a9"') - - def test_encoding4(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps([u]) - self.assertEquals(j, '["\\u03b1\\u03a9"]') - - def test_encoding5(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps(u, ensure_ascii=False) - self.assertEquals(j, u'"' + u + u'"') - - def test_encoding6(self): - u = u'\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}' - j = json.dumps([u], ensure_ascii=False) - self.assertEquals(j, u'["' + u + u'"]') - - def test_big_unicode_encode(self): - u = u'\U0001d120' - self.assertEquals(json.dumps(u), '"\\ud834\\udd20"') - self.assertEquals(json.dumps(u, ensure_ascii=False), u'"\U0001d120"') - - def test_big_unicode_decode(self): - u = u'z\U0001d120x' - self.assertEquals(json.loads('"' + u + '"'), u) - self.assertEquals(json.loads('"z\\ud834\\udd20x"'), u) - - def test_unicode_decode(self): - for i in range(0, 0xd7ff): - u = unichr(i) - #s = '"\\u{0:04x}"'.format(i) - 
s = '"\\u%04x"' % (i,) - self.assertEquals(json.loads(s), u) - - def test_object_pairs_hook_with_unicode(self): - s = u'{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}' - p = [(u"xkd", 1), (u"kcw", 2), (u"art", 3), (u"hxm", 4), - (u"qrt", 5), (u"pad", 6), (u"hoy", 7)] - self.assertEqual(json.loads(s), eval(s)) - self.assertEqual(json.loads(s, object_pairs_hook=lambda x: x), p) - od = json.loads(s, object_pairs_hook=json.OrderedDict) - self.assertEqual(od, json.OrderedDict(p)) - self.assertEqual(type(od), json.OrderedDict) - # the object_pairs_hook takes priority over the object_hook - self.assertEqual(json.loads(s, - object_pairs_hook=json.OrderedDict, - object_hook=lambda x: None), - json.OrderedDict(p)) - - - def test_default_encoding(self): - self.assertEquals(json.loads(u'{"a": "\xe9"}'.encode('utf-8')), - {'a': u'\xe9'}) - - def test_unicode_preservation(self): - self.assertEquals(type(json.loads(u'""')), unicode) - self.assertEquals(type(json.loads(u'"a"')), unicode) - self.assertEquals(type(json.loads(u'["a"]')[0]), unicode) - - def test_ensure_ascii_false_returns_unicode(self): - # http://code.google.com/p/simplejson/issues/detail?id=48 - self.assertEquals(type(json.dumps([], ensure_ascii=False)), unicode) - self.assertEquals(type(json.dumps(0, ensure_ascii=False)), unicode) - self.assertEquals(type(json.dumps({}, ensure_ascii=False)), unicode) - self.assertEquals(type(json.dumps("", ensure_ascii=False)), unicode) - - def test_ensure_ascii_false_bytestring_encoding(self): - # http://code.google.com/p/simplejson/issues/detail?id=48 - doc1 = {u'quux': 'Arr\xc3\xaat sur images'} - doc2 = {u'quux': u'Arr\xeat sur images'} - doc_ascii = '{"quux": "Arr\\u00eat sur images"}' - doc_unicode = u'{"quux": "Arr\xeat sur images"}' - self.assertEquals(json.dumps(doc1), doc_ascii) - self.assertEquals(json.dumps(doc2), doc_ascii) - self.assertEquals(json.dumps(doc1, ensure_ascii=False), doc_unicode) - self.assertEquals(json.dumps(doc2, ensure_ascii=False), doc_unicode) diff --git a/simplejson/tool.py b/simplejson/tool.py deleted file mode 100644 index 73370db..0000000 --- a/simplejson/tool.py +++ /dev/null @@ -1,39 +0,0 @@ -r"""Command-line tool to validate and pretty-print JSON - -Usage:: - - $ echo '{"json":"obj"}' | python -m simplejson.tool - { - "json": "obj" - } - $ echo '{ 1.2:3.4}' | python -m simplejson.tool - Expecting property name: line 1 column 2 (char 2) - -""" -import sys -import simplejson as json - -def main(): - if len(sys.argv) == 1: - infile = sys.stdin - outfile = sys.stdout - elif len(sys.argv) == 2: - infile = open(sys.argv[1], 'rb') - outfile = sys.stdout - elif len(sys.argv) == 3: - infile = open(sys.argv[1], 'rb') - outfile = open(sys.argv[2], 'wb') - else: - raise SystemExit(sys.argv[0] + " [infile [outfile]]") - try: - obj = json.load(infile, - object_pairs_hook=json.OrderedDict, - use_decimal=True) - except ValueError, e: - raise SystemExit(e) - json.dump(obj, outfile, sort_keys=True, indent=' ', use_decimal=True) - outfile.write('\n') - - -if __name__ == '__main__': - main() diff --git a/tests/test_discovery.py b/tests/test_discovery.py index b5ac753..3b544b0 100644 --- a/tests/test_discovery.py +++ b/tests/test_discovery.py @@ -61,7 +61,7 @@ class DiscoveryErrors(unittest.TestCase): def test_failed_to_parse_discovery_json(self): self.http = HttpMock(datafile('malformed.json'), {'status': '200'}) try: - buzz = build('buzz', 'v1', self.http) + plus = build('plus', 'v1', self.http) self.fail("should have raised an exception over malformed JSON.") except 
InvalidJsonError: pass @@ -70,16 +70,16 @@ class DiscoveryErrors(unittest.TestCase): class DiscoveryFromDocument(unittest.TestCase): def test_can_build_from_local_document(self): - discovery = file(datafile('buzz.json')).read() - buzz = build_from_document(discovery, base="https://www.googleapis.com/") - self.assertTrue(buzz is not None) + discovery = file(datafile('plus.json')).read() + plus = build_from_document(discovery, base="https://www.googleapis.com/") + self.assertTrue(plus is not None) def test_building_with_base_remembers_base(self): - discovery = file(datafile('buzz.json')).read() + discovery = file(datafile('plus.json')).read() base = "https://www.example.com/" - buzz = build_from_document(discovery, base=base) - self.assertEquals(base + "buzz/v1/", buzz._baseUrl) + plus = build_from_document(discovery, base=base) + self.assertEquals(base + "plus/v1/", plus._baseUrl) class DiscoveryFromHttp(unittest.TestCase): @@ -120,33 +120,26 @@ class DiscoveryFromHttp(unittest.TestCase): class Discovery(unittest.TestCase): def test_method_error_checking(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http) + self.http = HttpMock(datafile('plus.json'), {'status': '200'}) + plus = build('plus', 'v1', self.http) # Missing required parameters try: - buzz.activities().list() + plus.activities().list() self.fail() except TypeError, e: self.assertTrue('Missing' in str(e)) # Parameter doesn't match regex try: - buzz.activities().list(scope='@myself', userId='me') - self.fail() - except TypeError, e: - self.assertTrue('not an allowed value' in str(e)) - - # Parameter doesn't match regex - try: - buzz.activities().list(scope='not@', userId='foo') + plus.activities().list(collection='not_a_collection_name', userId='me') self.fail() except TypeError, e: self.assertTrue('not an allowed value' in str(e)) # Unexpected parameter try: - buzz.activities().list(flubber=12) + plus.activities().list(flubber=12) self.fail() except TypeError, e: self.assertTrue('unexpected' in str(e)) @@ -206,21 +199,11 @@ class Discovery(unittest.TestCase): self.assertTrue('x-http-method-override' in resp) - def test_buzz_resources(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http) - self.assertTrue(getattr(buzz, 'activities')) - self.assertTrue(getattr(buzz, 'photos')) - self.assertTrue(getattr(buzz, 'people')) - self.assertTrue(getattr(buzz, 'groups')) - self.assertTrue(getattr(buzz, 'comments')) - self.assertTrue(getattr(buzz, 'related')) - - def test_auth(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http) - auth = buzz.auth_discovery() - self.assertTrue('request' in auth) + def test_plus_resources(self): + self.http = HttpMock(datafile('plus.json'), {'status': '200'}) + plus = build('plus', 'v1', self.http) + self.assertTrue(getattr(plus, 'activities')) + self.assertTrue(getattr(plus, 'people')) def test_full_featured(self): # Zoo should exercise all discovery facets @@ -331,15 +314,6 @@ class Discovery(unittest.TestCase): self.assertTrue(request.headers['content-type'], 'application/json') class Next(unittest.TestCase): - def test_next_for_people_liked(self): - """Legacy test for Buzz _next support.""" - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http) - people = {'links': - {'next': - [{'href': 'http://www.googleapis.com/next-link'}]}} - request = buzz.people().liked_next(people) - 
self.assertEqual(request.uri, 'http://www.googleapis.com/next-link') def test_next_successful_none_on_no_next_page_token(self): self.http = HttpMock(datafile('tasks.json'), {'status': '200'}) @@ -363,29 +337,5 @@ class Next(unittest.TestCase): request = service.currentLocation().get() -class DeveloperKey(unittest.TestCase): - - def test_param(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http, developerKey='foobie_bletch') - activities = {'links': - {'next': - [{'href': 'http://www.googleapis.com/next-link'}]}} - request = buzz.activities().list_next(activities) - parsed = urlparse.urlparse(request.uri) - q = parse_qs(parsed[4]) - self.assertEqual(q['key'], ['foobie_bletch']) - - def test_next_for_activities_list(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) - buzz = build('buzz', 'v1', self.http, developerKey='foobie_bletch') - activities = {'links': - {'next': - [{'href': 'http://www.googleapis.com/next-link'}]}} - request = buzz.activities().list_next(activities) - self.assertEqual(request.uri, - 'http://www.googleapis.com/next-link?key=foobie_bletch') - - if __name__ == '__main__': unittest.main() diff --git a/tests/test_mocks.py b/tests/test_mocks.py index bfe55cf..6822335 100644 --- a/tests/test_mocks.py +++ b/tests/test_mocks.py @@ -41,103 +41,104 @@ def datafile(filename): class Mocks(unittest.TestCase): def setUp(self): - self.http = HttpMock(datafile('buzz.json'), {'status': '200'}) + self.http = HttpMock(datafile('plus.json'), {'status': '200'}) + self.zoo_http = HttpMock(datafile('zoo.json'), {'status': '200'}) def test_default_response(self): requestBuilder = RequestMockBuilder({}) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) - activity = buzz.activities().get(postId='tag:blah', userId='@me').execute() + plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder) + activity = plus.activities().get(activityId='tag:blah').execute() self.assertEqual({}, activity) def test_simple_response(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.get': (None, '{"data": {"foo": "bar"}}') + 'plus.activities.get': (None, '{"data": {"foo": "bar"}}') }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder) - activity = buzz.activities().get(postId='tag:blah', userId='@me').execute() + activity = plus.activities().get(activityId='tag:blah').execute() self.assertEqual({"foo": "bar"}, activity) def test_unexpected_call(self): requestBuilder = RequestMockBuilder({}, check_unexpected=True) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder) try: - buzz.activities().get(postId='tag:blah', userId='@me').execute() + plus.activities().get(activityId='tag:blah').execute() self.fail('UnexpectedMethodError should have been raised') except UnexpectedMethodError: pass def test_simple_unexpected_body(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.insert': (None, '{"data": {"foo": "bar"}}', None) + 'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', None) }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder) try: - buzz.activities().insert(userId='@me', body='{}').execute() + zoo.animals().insert(body='{}').execute() self.fail('UnexpectedBodyError 
should have been raised') except UnexpectedBodyError: pass def test_simple_expected_body(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.insert': (None, '{"data": {"foo": "bar"}}', '{}') + 'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', '{}') }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder) try: - buzz.activities().insert(userId='@me', body='').execute() + zoo.animals().insert(body='').execute() self.fail('UnexpectedBodyError should have been raised') except UnexpectedBodyError: pass def test_simple_wrong_body(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.insert': (None, '{"data": {"foo": "bar"}}', + 'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', '{"data": {"foo": "bar"}}') }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder) try: - buzz.activities().insert( - userId='@me', body='{"data": {"foo": "blah"}}').execute() + zoo.animals().insert( + body='{"data": {"foo": "blah"}}').execute() self.fail('UnexpectedBodyError should have been raised') except UnexpectedBodyError: pass def test_simple_matching_str_body(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.insert': (None, '{"data": {"foo": "bar"}}', + 'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', '{"data": {"foo": "bar"}}') }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder) - activity = buzz.activities().insert( - userId='@me', body={'data': {'foo': 'bar'}}).execute() + activity = zoo.animals().insert( + body={'data': {'foo': 'bar'}}).execute() self.assertEqual({'foo': 'bar'}, activity) def test_simple_matching_dict_body(self): requestBuilder = RequestMockBuilder({ - 'chili.activities.insert': (None, '{"data": {"foo": "bar"}}', + 'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', {'data': {'foo': 'bar'}}) }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder) - activity = buzz.activities().insert( - userId='@me', body={'data': {'foo': 'bar'}}).execute() + activity = zoo.animals().insert( + body={'data': {'foo': 'bar'}}).execute() self.assertEqual({'foo': 'bar'}, activity) def test_errors(self): errorResponse = httplib2.Response({'status': 500, 'reason': 'Server Error'}) requestBuilder = RequestMockBuilder({ - 'chili.activities.list': (errorResponse, '{}') + 'plus.activities.list': (errorResponse, '{}') }) - buzz = build('buzz', 'v1', http=self.http, requestBuilder=requestBuilder) + plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder) try: - activity = buzz.activities().list(scope='@self', userId='@me').execute() + activity = plus.activities().list(collection='public', userId='me').execute() self.fail('An exception should have been thrown') except HttpError, e: self.assertEqual('{}', e.content)
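For context, the rewritten discovery and mock tests above all follow the same pattern: HttpMock serves a cached discovery document in place of a live HTTP fetch, and RequestMockBuilder stubs out individual API calls keyed by method id (e.g. 'plus.activities.get'). The sketch below mirrors that usage; the import paths and the tests/data/plus.json fixture location are assumptions based on the library layout of this era, not something introduced by this patch.

    import os

    from apiclient.discovery import build
    from apiclient.http import HttpMock, RequestMockBuilder

    DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')  # assumed fixture location

    # Serve the cached Google+ discovery document instead of hitting the network.
    http = HttpMock(os.path.join(DATA_DIR, 'plus.json'), {'status': '200'})

    # Stub the one method under test; methods not listed here return an empty response.
    request_builder = RequestMockBuilder({
        'plus.activities.get': (None, '{"data": {"foo": "bar"}}'),
    })

    plus = build('plus', 'v1', http=http, requestBuilder=request_builder)
    activity = plus.activities().get(activityId='tag:blah').execute()
    assert activity == {'foo': 'bar'}

Building against zoo.json works the same way for the body-checking cases, e.g. zoo.animals().insert(body={'data': {'foo': 'bar'}}) paired with a 'zoo.animals.insert' entry in the builder, as the test_mocks.py hunks above show.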